[ 786.457604] env[69992]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=69992) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 786.457956] env[69992]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=69992) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 786.458086] env[69992]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=69992) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 786.458442] env[69992]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 786.556182] env[69992]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=69992) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 786.565888] env[69992]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=69992) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 786.608948] env[69992]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative
[ 787.170108] env[69992]: INFO nova.virt.driver [None req-6ca1efae-e636-4359-8615-9f0ffbd01897 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 787.240957] env[69992]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 787.241148] env[69992]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 787.241252] env[69992]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=69992) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 790.446235] env[69992]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-7370cf92-7a6f-40e3-a712-33f555de8375 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 790.462193] env[69992]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=69992) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 790.462637] env[69992]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-0e141ae6-3160-40b4-83d3-d203be477cd1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 790.493707] env[69992]: INFO oslo_vmware.api [-] Successfully established new session; session ID is f9d0e.
[ 790.493848] env[69992]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.253s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 790.494446] env[69992]: INFO nova.virt.vmwareapi.driver [None req-6ca1efae-e636-4359-8615-9f0ffbd01897 None None] VMware vCenter version: 7.0.3
[ 790.497862] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69493d6-24f0-4b05-88fa-5bf14373edcc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 790.515122] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-362cb5c0-c781-4207-90b1-4c4e0a2e1914 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 790.520946] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c520654c-170c-4a81-b3c2-a8289e50f7a4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 790.527490] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad8a8d8d-2869-484a-98b2-2bbd30c29d43 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 790.541222] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5b12e0f-9711-45fc-866e-01adbb098762 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 790.546851] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070e071b-7936-40f8-a9ff-12c8063a72d0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 790.576251] env[69992]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-1b7ae0bc-4923-450f-9f32-6607466cd12c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 790.581245] env[69992]: DEBUG nova.virt.vmwareapi.driver [None req-6ca1efae-e636-4359-8615-9f0ffbd01897 None None] Extension org.openstack.compute already exists. {{(pid=69992) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 790.584018] env[69992]: INFO nova.compute.provider_config [None req-6ca1efae-e636-4359-8615-9f0ffbd01897 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 791.088050] env[69992]: DEBUG nova.context [None req-6ca1efae-e636-4359-8615-9f0ffbd01897 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),4c6a070d-a6ed-4731-8fe9-fd1365c508f9(cell1) {{(pid=69992) load_cells /opt/stack/nova/nova/context.py:464}}
[ 791.089841] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 791.090089] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 791.090853] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 791.091296] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] Acquiring lock "4c6a070d-a6ed-4731-8fe9-fd1365c508f9" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 791.091491] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] Lock "4c6a070d-a6ed-4731-8fe9-fd1365c508f9" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 791.092577] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] Lock "4c6a070d-a6ed-4731-8fe9-fd1365c508f9" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 791.113996] env[69992]: INFO dbcounter [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] Registered counter for database nova_cell0
[ 791.122088] env[69992]: INFO dbcounter [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] Registered counter for database nova_cell1
[ 791.125362] env[69992]: DEBUG oslo_db.sqlalchemy.engines [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69992) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 791.125748] env[69992]: DEBUG oslo_db.sqlalchemy.engines [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69992) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 791.130672] env[69992]: ERROR nova.db.main.api [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 791.130672] env[69992]: result = function(*args, **kwargs)
[ 791.130672] env[69992]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 791.130672] env[69992]: return func(*args, **kwargs)
[ 791.130672] env[69992]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 791.130672] env[69992]: result = fn(*args, **kwargs)
[ 791.130672] env[69992]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 791.130672] env[69992]: return f(*args, **kwargs)
[ 791.130672] env[69992]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 791.130672] env[69992]: return db.service_get_minimum_version(context, binaries)
[ 791.130672] env[69992]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 791.130672] env[69992]: _check_db_access()
[ 791.130672] env[69992]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 791.130672] env[69992]: stacktrace = ''.join(traceback.format_stack())
[ 791.130672] env[69992]:
[ 791.131512] env[69992]: ERROR nova.db.main.api [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 791.131512] env[69992]: result = function(*args, **kwargs)
[ 791.131512] env[69992]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 791.131512] env[69992]: return func(*args, **kwargs)
[ 791.131512] env[69992]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 791.131512] env[69992]: result = fn(*args, **kwargs)
[ 791.131512] env[69992]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 791.131512] env[69992]: return f(*args, **kwargs)
[ 791.131512] env[69992]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 791.131512] env[69992]: return db.service_get_minimum_version(context, binaries)
[ 791.131512] env[69992]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 791.131512] env[69992]: _check_db_access()
[ 791.131512] env[69992]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 791.131512] env[69992]: stacktrace = ''.join(traceback.format_stack())
[ 791.131512] env[69992]:
[ 791.131953] env[69992]: WARNING nova.objects.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] Failed to get minimum service version for cell 4c6a070d-a6ed-4731-8fe9-fd1365c508f9
[ 791.132045] env[69992]: WARNING nova.objects.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 791.132496] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] Acquiring lock "singleton_lock" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 791.132654] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] Acquired lock "singleton_lock" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}}
[ 791.132890] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] Releasing lock "singleton_lock" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}}
[ 791.133225] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] Full set of CONF: {{(pid=69992) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/service.py:357}}
[ 791.133367] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ******************************************************************************** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}}
[ 791.133497] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] Configuration options gathered from: {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}}
[ 791.133632] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}}
[ 791.133823] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}}
[ 791.133949] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ================================================================================ {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}}
[ 791.134172] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] allow_resize_to_same_host = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.134342] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] arq_binding_timeout = 300 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.134477] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] backdoor_port = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.134629] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] backdoor_socket = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.134801] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] block_device_allocate_retries = 60 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.134962] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] block_device_allocate_retries_interval = 3 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.135152] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cert = self.pem {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.135324] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.135496] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] compute_monitors = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.135669] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] config_dir = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.135841] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] config_drive_format = iso9660 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.135975] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.136157] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] config_source = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.136353] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] console_host = devstack {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.136493] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] control_exchange = nova {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.136652] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cpu_allocation_ratio = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.136812] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] daemon = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.136979] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] debug = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.137151] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] default_access_ip_network_name = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.137317] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] default_availability_zone = nova {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.137473] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] default_ephemeral_format = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.137660] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] default_green_pool_size = 1000 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.137905] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.138081] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] default_schedule_zone = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.138243] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] disk_allocation_ratio = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.138406] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] enable_new_services = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.138584] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] enabled_apis = ['osapi_compute'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.138749] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] enabled_ssl_apis = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.138909] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] flat_injected = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.139081] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] force_config_drive = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.139245] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] force_raw_images = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.139417] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] graceful_shutdown_timeout = 5 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.139580] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] heal_instance_info_cache_interval = -1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.139800] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] host = cpu-1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.139975] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] initial_cpu_allocation_ratio = 4.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.140149] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] initial_disk_allocation_ratio = 1.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.140311] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] initial_ram_allocation_ratio = 1.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.140556] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.140726] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] instance_build_timeout = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.140886] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] instance_delete_interval = 300 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.141074] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] instance_format = [instance: %(uuid)s] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.141246] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] instance_name_template = instance-%08x {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.141407] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] instance_usage_audit = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.141579] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] instance_usage_audit_period = month {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.141745] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.141913] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] instances_path = /opt/stack/data/nova/instances {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.142091] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] internal_service_availability_zone = internal {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.142253] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] key = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.142413] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] live_migration_retry_count = 30 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.142613] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] log_color = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.142786] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] log_config_append = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.142957] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.143133] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] log_dir = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.143296] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] log_file = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.143461] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] log_options = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.143644] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] log_rotate_interval = 1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.143817] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] log_rotate_interval_type = days {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.143987] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] log_rotation_type = none {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.144132] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.144260] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.144429] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.144594] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.144721] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.144883] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] long_rpc_timeout = 1800 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.145056] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] max_concurrent_builds = 10 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.145220] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] max_concurrent_live_migrations = 1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.145380] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] max_concurrent_snapshots = 5 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.145538] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] max_local_block_devices = 3 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.145698] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] max_logfile_count = 30 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.145856] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] max_logfile_size_mb = 200 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.146029] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] maximum_instance_delete_attempts = 5 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.146206] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] metadata_listen = 0.0.0.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.146393] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] metadata_listen_port = 8775 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.146575] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] metadata_workers = 2 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.146746] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] migrate_max_retries = -1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.146915] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] mkisofs_cmd = genisoimage {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.147136] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] my_block_storage_ip = 10.180.1.21 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.147269] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] my_ip = 10.180.1.21 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.147478] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.147646] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] network_allocate_retries = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.147824] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.147994] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] osapi_compute_listen = 0.0.0.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.148175] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] osapi_compute_listen_port = 8774 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.148343] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] osapi_compute_unique_server_name_scope = {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.148513] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] osapi_compute_workers = 2 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.148678] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] password_length = 12 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.148838] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] periodic_enable = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.148998] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] periodic_fuzzy_delay = 60 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.149183] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] pointer_model = usbtablet {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.149354] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] preallocate_images = none {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.149541] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] publish_errors = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.149679] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] pybasedir = /opt/stack/nova {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.149842] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ram_allocation_ratio = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.150010] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] rate_limit_burst = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.150188] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] rate_limit_except_level = CRITICAL {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.150350] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] rate_limit_interval = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.150511] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] reboot_timeout = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.150671] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] reclaim_instance_interval = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.150828] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] record = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.150997] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] reimage_timeout_per_gb = 60 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.151181] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] report_interval = 120 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.151343] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] rescue_timeout = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.151504] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] reserved_host_cpus = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.151664] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] reserved_host_disk_mb = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.151821] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] reserved_host_memory_mb = 512 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.151976] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] reserved_huge_pages = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.152148] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] resize_confirm_window = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.152308] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] resize_fs_using_block_device = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.152520] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] resume_guests_state_on_host_boot = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.152706] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.152874] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] rpc_response_timeout = 60 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.153047] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] run_external_periodic_tasks = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.153222] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] running_deleted_instance_action = reap {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.153384] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] running_deleted_instance_poll_interval = 1800 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.153589] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] running_deleted_instance_timeout = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.153768] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] scheduler_instance_sync_interval = 120 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.153944] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] service_down_time = 720 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.154131] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] servicegroup_driver = db {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.154291] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] shell_completion = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.154452] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] shelved_offload_time = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.154613] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] shelved_poll_interval = 3600 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.154783] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] shutdown_timeout = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.154943] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] source_is_ipv6 = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.155220] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ssl_only = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.155357] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.155600] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] sync_power_state_interval = 600 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.155712] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] sync_power_state_pool_size = 1000 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.155883] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] syslog_log_facility = LOG_USER {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.156057] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] tempdir = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.156224] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] timeout_nbd = 10 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.156395] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] transport_url = **** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.156557] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] update_resources_interval = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.156720] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] use_cow_images = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.156880] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] use_journal = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.157048] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] use_json = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.157212] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] use_rootwrap_daemon = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.157371] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] use_stderr = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.157530] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] use_syslog = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.157687] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vcpu_pin_set = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.157856] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vif_plugging_is_fatal = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.158034] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vif_plugging_timeout = 300 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.158215] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] virt_mkfs = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.158405] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] volume_usage_poll_interval = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.158592] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] watch_log_file = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.158772] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] web = /usr/share/spice-html5 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 791.158958] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.159143] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.159309] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] os_brick.wait_mpath_device_interval = 1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.159482] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_concurrency.disable_process_locking = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.160055] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.160255] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.160434] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.160613] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_metrics.metrics_process_name = {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.160792] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.160965] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.161180] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.auth_strategy = keystone {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.161353] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.compute_link_prefix = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.161552] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.161743] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.dhcp_domain = novalocal {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.161917] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.enable_instance_password = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.162101] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.glance_link_prefix = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.162275] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.162482] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.instance_list_cells_batch_strategy = distributed {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.162650] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.instance_list_per_project_cells = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.162817] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.list_records_by_skipping_down_cells = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.162982] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.local_metadata_per_cell = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.163169] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.max_limit = 1000 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.163339] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.metadata_cache_expiration = 15 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.163540] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.neutron_default_tenant_id = default {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.163731] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.response_validation = warn {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.163907] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.use_neutron_default_nets = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.164092] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.164261] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.vendordata_dynamic_failure_fatal = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.164455] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.164654] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.vendordata_dynamic_ssl_certfile = {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.164832] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.vendordata_dynamic_targets = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.164997] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.vendordata_jsonfile_path = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.165208] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api.vendordata_providers = ['StaticJSON'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.165403] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.backend = dogpile.cache.memcached {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.165577] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.backend_argument = **** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.165742] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.backend_expiration_time = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.165915] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.config_prefix = cache.oslo {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.166096] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.dead_timeout = 60.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.166267] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.debug_cache_backend = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.166432] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.enable_retry_client = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.166596] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.enable_socket_keepalive = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.166767] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.enabled = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.166933] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.enforce_fips_mode = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 791.167112] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.expiration_time = 600
{{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.167280] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.hashclient_retry_attempts = 2 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.167463] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.hashclient_retry_delay = 1.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.167645] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.memcache_dead_retry = 300 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.167810] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.memcache_password = **** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.167976] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.168153] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.168319] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.memcache_pool_maxsize = 10 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.168482] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.memcache_pool_unused_timeout = 60 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.168646] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.memcache_sasl_enabled = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.168824] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.memcache_servers = ['localhost:11211'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.168990] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.memcache_socket_timeout = 1.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.169166] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.memcache_username = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.169332] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.proxies = [] {{(pid=69992) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.169495] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.redis_db = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.169659] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.redis_password = **** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.169830] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.redis_sentinel_service_name = mymaster {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.170013] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.170195] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.redis_server = localhost:6379 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.170383] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.redis_socket_timeout = 1.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.170557] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.redis_username = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.170727] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.retry_attempts = 2 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.170894] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.retry_delay = 0.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.171681] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.socket_keepalive_count = 1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.171681] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.socket_keepalive_idle = 1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.171681] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.socket_keepalive_interval = 1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.171681] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.tls_allowed_ciphers = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.171856] env[69992]: DEBUG 
oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.tls_cafile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.171856] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.tls_certfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.172060] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.tls_enabled = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.172197] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cache.tls_keyfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.172371] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cinder.auth_section = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.172575] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cinder.auth_type = password {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.172749] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cinder.cafile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.172928] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cinder.catalog_info = volumev3::publicURL {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.173111] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cinder.certfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.173285] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cinder.collect_timing = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.173476] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cinder.cross_az_attach = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.173671] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cinder.debug = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.173842] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cinder.endpoint_template = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.174021] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cinder.http_retries = 3 {{(pid=69992) log_opt_values 
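[editorial aside] The `cache.*` entries above are part of the option dump that oslo.config emits at service startup through `ConfigOpts.log_opt_values()` (the call site cfg.py:2817/2824 appears in every entry). As a minimal, self-contained sketch of that mechanism, the snippet below re-registers a few `[cache]` options using the values visible in the dump and logs them the same way; the option declarations here are illustrative stand-ins, not Nova's or oslo.cache's actual registration code.

```python
# Hypothetical sketch: how "group.option = value" DEBUG lines like the ones above
# are produced by oslo.config. Only a few [cache] options are re-declared here,
# with defaults copied from the dump, purely for illustration.
import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

CONF = cfg.ConfigOpts()
CONF.register_opts(
    [
        cfg.StrOpt('backend', default='dogpile.cache.memcached'),
        cfg.ListOpt('memcache_servers', default=['localhost:11211']),
        cfg.IntOpt('expiration_time', default=600),
        # secret=True is why the dump shows "cache.memcache_password = ****"
        cfg.StrOpt('memcache_password', secret=True),
    ],
    group='cache',
)

CONF([])                                  # parse: no CLI args, no config files
CONF.log_opt_values(LOG, logging.DEBUG)   # emits the "cache.option = value" lines
print(CONF.cache.backend, CONF.cache.memcache_servers)
```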
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.174191] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cinder.insecure = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.174350] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cinder.keyfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.174522] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cinder.os_region_name = RegionOne {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.174688] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cinder.split_loggers = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.174853] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cinder.timeout = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.175036] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.175203] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] compute.cpu_dedicated_set = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.175364] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] compute.cpu_shared_set = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.175527] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] compute.image_type_exclude_list = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.175689] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] compute.live_migration_wait_for_vif_plug = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.175853] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] compute.max_concurrent_disk_ops = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.176026] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] compute.max_disk_devices_to_attach = -1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.176194] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=69992) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.176365] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.176562] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] compute.resource_provider_association_refresh = 300 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.176735] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.176901] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] compute.shutdown_retry_interval = 10 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.177098] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.177283] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] conductor.workers = 2 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.177462] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] console.allowed_origins = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.177625] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] console.ssl_ciphers = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.177797] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] console.ssl_minimum_version = default {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.177969] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] consoleauth.enforce_session_timeout = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.178153] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] consoleauth.token_ttl = 600 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.178334] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cyborg.cafile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.178490] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cyborg.certfile = None {{(pid=69992) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.178654] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cyborg.collect_timing = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.178813] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cyborg.connect_retries = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.178974] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cyborg.connect_retry_delay = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.179149] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cyborg.endpoint_override = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.179314] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cyborg.insecure = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.179494] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cyborg.keyfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.179671] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cyborg.max_version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.179833] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cyborg.min_version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.179993] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cyborg.region_name = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.180169] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cyborg.retriable_status_codes = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.180332] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cyborg.service_name = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.180502] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cyborg.service_type = accelerator {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.180667] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cyborg.split_loggers = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.180830] env[69992]: DEBUG oslo_service.backend.eventlet.service 
[None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cyborg.status_code_retries = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.180988] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cyborg.status_code_retry_delay = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.181159] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cyborg.timeout = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.181344] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.181506] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] cyborg.version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.181679] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] database.asyncio_connection = **** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.181841] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] database.asyncio_slave_connection = **** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.182026] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] database.backend = sqlalchemy {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.182204] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] database.connection = **** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.182373] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] database.connection_debug = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.182582] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] database.connection_parameters = {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.182763] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] database.connection_recycle_time = 3600 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.182932] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] database.connection_trace = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.183118] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] database.db_inc_retry_interval = 
True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.183289] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] database.db_max_retries = 20 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.183457] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] database.db_max_retry_interval = 10 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.183643] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] database.db_retry_interval = 1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.183818] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] database.max_overflow = 50 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.183983] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] database.max_pool_size = 5 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.184163] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] database.max_retries = 10 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.184337] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] database.mysql_sql_mode = TRADITIONAL {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.184500] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] database.mysql_wsrep_sync_wait = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.184660] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] database.pool_timeout = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.184824] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] database.retry_interval = 10 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.184983] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] database.slave_connection = **** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.185159] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] database.sqlite_synchronous = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.185324] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] database.use_db_reconnect = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
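[editorial aside] Several options in this dump are printed as ****, for example database.connection and database.slave_connection above, and api_database.connection, cache.memcache_password and key_manager.fixed_key elsewhere. That is oslo.config masking options declared with `secret=True` when it logs them, not an empty or missing value; the real value remains available to the service. A small hypothetical illustration (the connection URL below is a made-up placeholder):

```python
# Hypothetical sketch: why "database.connection" appears as "****" in the dump.
# Options declared with secret=True are masked by ConfigOpts.log_opt_values(),
# while normal attribute access still returns the real value.
import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

CONF = cfg.ConfigOpts()
CONF.register_opts(
    [
        cfg.StrOpt('connection', secret=True),   # masked in the dump
        cfg.IntOpt('max_pool_size', default=5),  # logged in clear text
        cfg.IntOpt('max_overflow', default=50),
    ],
    group='database',
)
CONF([])
CONF.set_override('connection', 'mysql+pymysql://nova:secret@db/nova',
                  group='database')              # placeholder URL, not from the log

CONF.log_opt_values(LOG, logging.DEBUG)          # connection is printed as ****
assert CONF.database.connection.startswith('mysql+pymysql://')  # real value intact
```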
791.185495] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api_database.asyncio_connection = **** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.185684] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api_database.asyncio_slave_connection = **** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.185861] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api_database.backend = sqlalchemy {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.186043] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api_database.connection = **** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.186214] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api_database.connection_debug = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.186388] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api_database.connection_parameters = {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.186557] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api_database.connection_recycle_time = 3600 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.186725] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api_database.connection_trace = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.186889] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api_database.db_inc_retry_interval = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.187067] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api_database.db_max_retries = 20 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.187237] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api_database.db_max_retry_interval = 10 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.187403] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api_database.db_retry_interval = 1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.187567] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api_database.max_overflow = 50 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.187734] env[69992]: DEBUG 
oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api_database.max_pool_size = 5 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.187895] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api_database.max_retries = 10 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.188079] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.188242] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api_database.mysql_wsrep_sync_wait = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.188404] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api_database.pool_timeout = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.188600] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api_database.retry_interval = 10 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.188770] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api_database.slave_connection = **** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.188934] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] api_database.sqlite_synchronous = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.189123] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] devices.enabled_mdev_types = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.189305] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.189479] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ephemeral_storage_encryption.default_format = luks {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.189647] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ephemeral_storage_encryption.enabled = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.189810] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ephemeral_storage_encryption.key_size = 512 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.189982] env[69992]: DEBUG 
oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.api_servers = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.190164] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.cafile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.190328] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.certfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.190493] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.collect_timing = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.190656] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.connect_retries = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.190818] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.connect_retry_delay = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.190983] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.debug = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.191166] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.default_trusted_certificate_ids = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.191344] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.enable_certificate_validation = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.191567] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.enable_rbd_download = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.191748] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.endpoint_override = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.191922] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.insecure = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.192101] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.keyfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.192267] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.max_version = None {{(pid=69992) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.192466] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.min_version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.192672] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.num_retries = 3 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.192852] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.rbd_ceph_conf = {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.193030] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.rbd_connect_timeout = 5 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.193207] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.rbd_pool = {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.193375] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.rbd_user = {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.193557] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.region_name = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.193787] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.retriable_status_codes = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.193967] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.service_name = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.194160] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.service_type = image {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.194330] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.split_loggers = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.194495] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.status_code_retries = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.194660] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.status_code_retry_delay = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.194824] env[69992]: DEBUG oslo_service.backend.eventlet.service [None 
req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.timeout = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.195026] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.195200] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.verify_glance_signatures = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.195378] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] glance.version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.195560] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] guestfs.debug = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.195735] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.auth_section = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.195901] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.auth_type = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.196078] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.cafile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.196242] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.certfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.196408] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.collect_timing = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.196568] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.connect_retries = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.196761] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.connect_retry_delay = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.196933] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.endpoint_override = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.197111] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.insecure = False {{(pid=69992) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.197276] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.keyfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.197438] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.max_version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.197599] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.min_version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.197779] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.region_name = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.197945] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.retriable_status_codes = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.198128] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.service_name = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.198305] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.service_type = shared-file-system {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.198473] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.share_apply_policy_timeout = 10 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.198640] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.split_loggers = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.198805] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.status_code_retries = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.198961] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.status_code_retry_delay = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.199137] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.timeout = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.199323] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.199488] env[69992]: DEBUG 
oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] manila.version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.199684] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] mks.enabled = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.200080] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.200282] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] image_cache.manager_interval = 2400 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.200540] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] image_cache.precache_concurrency = 1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.200723] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] image_cache.remove_unused_base_images = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.200901] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.201090] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.201277] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] image_cache.subdirectory_name = _base {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.201456] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.api_max_retries = 60 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.201624] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.api_retry_interval = 2 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.201788] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.auth_section = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.201950] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.auth_type = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.202124] env[69992]: DEBUG oslo_service.backend.eventlet.service [None 
req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.cafile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.202289] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.certfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.202640] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.collect_timing = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.202849] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.conductor_group = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.203035] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.connect_retries = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.203206] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.connect_retry_delay = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.203370] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.endpoint_override = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.203546] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.insecure = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.203736] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.keyfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.203905] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.max_version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.204077] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.min_version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.204250] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.peer_list = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.204414] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.region_name = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.204573] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.retriable_status_codes = None {{(pid=69992) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.204744] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.serial_console_state_timeout = 10 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.204902] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.service_name = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.205088] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.service_type = baremetal {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.205254] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.shard = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.205420] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.split_loggers = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.205580] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.status_code_retries = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.205763] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.status_code_retry_delay = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.205938] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.timeout = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.206137] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.206304] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ironic.version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.206490] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.206668] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] key_manager.fixed_key = **** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.206855] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=69992) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.207030] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican.barbican_api_version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.207197] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican.barbican_endpoint = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.207372] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican.barbican_endpoint_type = public {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.207534] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican.barbican_region_name = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.207696] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican.cafile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.207855] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican.certfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.208028] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican.collect_timing = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.208197] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican.insecure = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.208356] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican.keyfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.208521] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican.number_of_retries = 60 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.208686] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican.retry_delay = 1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.208873] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican.send_service_user_token = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.209054] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican.split_loggers = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.209220] env[69992]: DEBUG 
oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican.timeout = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.209384] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican.verify_ssl = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.209544] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican.verify_ssl_path = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.209711] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican_service_user.auth_section = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.209873] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican_service_user.auth_type = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.210045] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican_service_user.cafile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.210210] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican_service_user.certfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.210438] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican_service_user.collect_timing = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.210618] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican_service_user.insecure = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.210777] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican_service_user.keyfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.210942] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican_service_user.split_loggers = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.211114] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] barbican_service_user.timeout = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.211286] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vault.approle_role_id = **** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.211445] env[69992]: DEBUG oslo_service.backend.eventlet.service [None 
req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vault.approle_secret_id = **** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.211618] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vault.kv_mountpoint = secret {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.211803] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vault.kv_path = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.211980] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vault.kv_version = 2 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.212157] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vault.namespace = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.212316] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vault.root_token_id = **** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.212540] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vault.ssl_ca_crt_file = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.212728] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vault.timeout = 60.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.212895] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vault.use_ssl = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.213087] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.213261] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] keystone.cafile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.213424] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] keystone.certfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.213603] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] keystone.collect_timing = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.213773] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] keystone.connect_retries = None {{(pid=69992) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.213933] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] keystone.connect_retry_delay = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.214105] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] keystone.endpoint_override = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.214268] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] keystone.insecure = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.214485] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] keystone.keyfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.214672] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] keystone.max_version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.214894] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] keystone.min_version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.215072] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] keystone.region_name = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.215242] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] keystone.retriable_status_codes = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.215404] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] keystone.service_name = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.215577] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] keystone.service_type = identity {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.215743] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] keystone.split_loggers = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.215902] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] keystone.status_code_retries = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.216072] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] keystone.status_code_retry_delay = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.216235] env[69992]: DEBUG 
oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] keystone.timeout = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.216420] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.216583] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] keystone.version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.216789] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.ceph_mount_options = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.217133] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.217320] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.connection_uri = {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.217487] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.cpu_mode = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.217657] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.cpu_model_extra_flags = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.217886] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.cpu_models = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.218091] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.cpu_power_governor_high = performance {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.218267] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.cpu_power_governor_low = powersave {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.218432] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.cpu_power_management = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.218609] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.218783] env[69992]: DEBUG oslo_service.backend.eventlet.service [None 
req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.device_detach_attempts = 8 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.218949] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.device_detach_timeout = 20 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.219132] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.disk_cachemodes = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.219297] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.disk_prefix = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.219462] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.enabled_perf_events = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.219625] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.file_backed_memory = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.219792] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.gid_maps = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.219954] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.hw_disk_discard = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.220126] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.hw_machine_type = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.220298] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.images_rbd_ceph_conf = {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.220547] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.220727] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.220903] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.images_rbd_glance_store_name = {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.221086] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.images_rbd_pool = rbd 
{{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.221261] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.images_type = default {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.221424] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.images_volume_group = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.221590] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.inject_key = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.221758] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.inject_partition = -2 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.221921] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.inject_password = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.222106] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.iscsi_iface = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.222275] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.iser_use_multipath = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.222502] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.live_migration_bandwidth = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.222685] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.live_migration_completion_timeout = 800 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.222856] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.live_migration_downtime = 500 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.223061] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.live_migration_downtime_delay = 75 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.223237] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.live_migration_downtime_steps = 10 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.223465] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.live_migration_inbound_addr = None {{(pid=69992) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.223667] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.live_migration_permit_auto_converge = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.223844] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.live_migration_permit_post_copy = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.224014] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.live_migration_scheme = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.224195] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.live_migration_timeout_action = abort {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.224363] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.live_migration_tunnelled = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.224524] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.live_migration_uri = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.224689] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.live_migration_with_native_tls = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.224851] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.max_queues = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.225022] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.mem_stats_period_seconds = 10 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.225300] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.225474] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.nfs_mount_options = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.225781] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.225957] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.num_aoe_discover_tries = 3 {{(pid=69992) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.226140] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.num_iser_scan_tries = 5 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.226305] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.num_memory_encrypted_guests = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.226536] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.num_nvme_discover_tries = 5 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.226719] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.num_pcie_ports = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.226887] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.num_volume_scan_tries = 5 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.227066] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.pmem_namespaces = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.227231] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.quobyte_client_cfg = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.227536] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.227714] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.rbd_connect_timeout = 5 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.227881] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.228058] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.228223] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.rbd_secret_uuid = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.228384] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.rbd_user = None {{(pid=69992) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.228548] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.realtime_scheduler_priority = 1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.228723] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.remote_filesystem_transport = ssh {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.228886] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.rescue_image_id = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.229055] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.rescue_kernel_id = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.229227] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.rescue_ramdisk_id = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.229392] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.rng_dev_path = /dev/urandom {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.229627] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.rx_queue_size = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.229804] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.smbfs_mount_options = {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.230112] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.230294] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.snapshot_compression = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.230459] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.snapshot_image_format = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.230697] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.230868] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.sparse_logical_volumes = False {{(pid=69992) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.231046] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.swtpm_enabled = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.231224] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.swtpm_group = tss {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.231393] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.swtpm_user = tss {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.231564] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.sysinfo_serial = unique {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.231727] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.tb_cache_size = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.231884] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.tx_queue_size = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.232060] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.uid_maps = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.232227] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.use_virtio_for_bridges = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.232399] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.virt_type = kvm {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.232665] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.volume_clear = zero {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.232840] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.volume_clear_size = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.233021] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.volume_enforce_multipath = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.233196] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.volume_use_multipath = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.233362] env[69992]: DEBUG 
oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.vzstorage_cache_path = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.233553] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.233752] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.vzstorage_mount_group = qemu {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.233927] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.vzstorage_mount_opts = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.234115] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.234434] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.234631] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.vzstorage_mount_user = stack {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.234833] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.235038] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.auth_section = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.235226] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.auth_type = password {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.235444] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.cafile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.235638] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.certfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.235808] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.collect_timing = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.235971] env[69992]: DEBUG 
oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.connect_retries = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.236148] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.connect_retry_delay = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.236321] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.default_floating_pool = public {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.236488] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.endpoint_override = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.236649] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.extension_sync_interval = 600 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.236812] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.http_retries = 3 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.236978] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.insecure = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.237151] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.keyfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.237311] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.max_version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.237482] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.metadata_proxy_shared_secret = **** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.237642] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.min_version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.237815] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.ovs_bridge = br-int {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.237980] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.physnets = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.238164] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.region_name = RegionOne 
{{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.238328] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.retriable_status_codes = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.238565] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.service_metadata_proxy = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.238743] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.service_name = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.238918] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.service_type = network {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.239099] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.split_loggers = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.239262] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.status_code_retries = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.239424] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.status_code_retry_delay = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.239584] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.timeout = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.239768] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.239930] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] neutron.version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.240118] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] notifications.bdms_in_notifications = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.240300] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] notifications.default_level = INFO {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.240470] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] notifications.include_share_mapping = False {{(pid=69992) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.240651] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] notifications.notification_format = unversioned {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.240819] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] notifications.notify_on_state_change = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.240994] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.241188] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] pci.alias = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.241359] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] pci.device_spec = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.241589] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] pci.report_in_placement = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.241777] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.auth_section = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.241954] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.auth_type = password {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.242146] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.auth_url = http://10.180.1.21/identity {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.242309] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.cafile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.242528] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.certfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.242710] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.collect_timing = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.242875] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.connect_retries = None {{(pid=69992) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.243048] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.connect_retry_delay = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.243211] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.default_domain_id = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.243371] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.default_domain_name = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.243538] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.domain_id = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.243721] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.domain_name = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.243886] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.endpoint_override = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.244060] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.insecure = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.244224] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.keyfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.244385] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.max_version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.244600] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.min_version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.244809] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.password = **** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.245012] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.project_domain_id = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.245198] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.project_domain_name = Default {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.245368] env[69992]: DEBUG 
oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.project_id = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.245543] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.project_name = service {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.245716] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.region_name = RegionOne {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.245879] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.retriable_status_codes = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.246055] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.service_name = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.246232] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.service_type = placement {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.246399] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.split_loggers = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.246561] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.status_code_retries = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.246722] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.status_code_retry_delay = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.246884] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.system_scope = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.247051] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.timeout = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.247215] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.trust_id = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.247374] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.user_domain_id = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.247616] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] 
placement.user_domain_name = Default {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.247785] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.user_id = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.247962] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.username = nova {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.248161] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.248325] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] placement.version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.248508] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] quota.cores = 20 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.248675] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] quota.count_usage_from_placement = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.248847] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.249026] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] quota.injected_file_content_bytes = 10240 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.249197] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] quota.injected_file_path_length = 255 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.249366] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] quota.injected_files = 5 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.249534] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] quota.instances = 10 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.249701] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] quota.key_pairs = 100 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.249870] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] quota.metadata_items = 128 {{(pid=69992) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.250042] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] quota.ram = 51200 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.250209] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] quota.recheck_quota = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.250437] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] quota.server_group_members = 10 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.250621] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] quota.server_groups = 10 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.250836] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] quota.unified_limits_resource_list = ['servers'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.251030] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] quota.unified_limits_resource_strategy = require {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.251198] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.252553] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.252771] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] scheduler.image_metadata_prefilter = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.252959] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.253147] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] scheduler.max_attempts = 3 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.253323] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] scheduler.max_placement_results = 1000 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.253558] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=69992) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.253692] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] scheduler.query_placement_for_image_type_support = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.253859] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.254056] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] scheduler.workers = 2 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.254244] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.254433] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.254647] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.254825] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.254994] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.255180] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.255355] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.255570] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.255759] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] 
filter_scheduler.host_subset_size = 1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.255933] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.256109] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.image_properties_default_architecture = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.256282] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.image_props_weight_multiplier = 0.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.256451] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.image_props_weight_setting = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.256631] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.256798] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.isolated_hosts = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.256967] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.isolated_images = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.257147] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.max_instances_per_host = 50 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.257313] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.257481] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.257646] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.pci_in_placement = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.257811] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.257975] env[69992]: DEBUG 
oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.258154] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.258319] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.258484] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.258652] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.258820] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.track_instance_changes = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.258998] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.259185] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] metrics.required = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.259355] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] metrics.weight_multiplier = 1.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.259521] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] metrics.weight_of_unavailable = -10000.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.259691] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] metrics.weight_setting = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.260031] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.260213] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] serial_console.enabled = False {{(pid=69992) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.260419] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] serial_console.port_range = 10000:20000 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.260607] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.260784] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.260954] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] serial_console.serialproxy_port = 6083 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.261139] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] service_user.auth_section = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.261316] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] service_user.auth_type = password {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.261482] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] service_user.cafile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.261646] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] service_user.certfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.261812] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] service_user.collect_timing = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.261979] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] service_user.insecure = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.262153] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] service_user.keyfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.262325] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] service_user.send_service_user_token = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.262515] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] service_user.split_loggers = False {{(pid=69992) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.262688] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] service_user.timeout = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.262860] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] spice.agent_enabled = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.263042] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] spice.enabled = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.263355] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.263586] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] spice.html5proxy_host = 0.0.0.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.263763] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] spice.html5proxy_port = 6082 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.263929] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] spice.image_compression = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.264106] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] spice.jpeg_compression = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.264271] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] spice.playback_compression = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.264437] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] spice.require_secure = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.264612] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] spice.server_listen = 127.0.0.1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.264786] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.265077] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=69992) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.265255] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] spice.streaming_mode = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.265422] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] spice.zlib_compression = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.265635] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] upgrade_levels.baseapi = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.265847] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] upgrade_levels.compute = auto {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.266028] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] upgrade_levels.conductor = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.266197] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] upgrade_levels.scheduler = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.266368] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vendordata_dynamic_auth.auth_section = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.266561] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vendordata_dynamic_auth.auth_type = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.266742] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vendordata_dynamic_auth.cafile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.266905] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vendordata_dynamic_auth.certfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.267084] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vendordata_dynamic_auth.collect_timing = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.267253] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vendordata_dynamic_auth.insecure = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.267417] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vendordata_dynamic_auth.keyfile = None {{(pid=69992) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.267582] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vendordata_dynamic_auth.split_loggers = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.267746] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vendordata_dynamic_auth.timeout = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.267923] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.api_retry_count = 10 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.268107] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.ca_file = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.268288] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.cache_prefix = devstack-image-cache {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.268460] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.cluster_name = testcl1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.268631] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.connection_pool_size = 10 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.268794] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.console_delay_seconds = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.268966] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.datastore_regex = ^datastore.* {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.269189] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.269368] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.host_password = **** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.269539] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.host_port = 443 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.269711] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.host_username = administrator@vsphere.local {{(pid=69992) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.269878] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.insecure = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.270052] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.integration_bridge = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.270220] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.maximum_objects = 100 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.270394] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.pbm_default_policy = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.270565] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.pbm_enabled = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.270728] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.pbm_wsdl_location = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.270897] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.271068] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.serial_port_proxy_uri = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.271234] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.serial_port_service_uri = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.271404] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.task_poll_interval = 0.5 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.271578] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.use_linked_clone = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.271750] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.vnc_keymap = en-us {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.271917] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.vnc_port = 5900 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.272096] env[69992]: DEBUG 
oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vmware.vnc_port_total = 10000 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.272286] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vnc.auth_schemes = ['none'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.272503] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vnc.enabled = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.272803] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.272994] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.273184] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vnc.novncproxy_port = 6080 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.273376] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vnc.server_listen = 127.0.0.1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.273568] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.273729] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vnc.vencrypt_ca_certs = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.273890] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vnc.vencrypt_client_cert = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.274062] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vnc.vencrypt_client_key = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.274248] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.274413] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] workarounds.disable_deep_image_inspection = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.274576] env[69992]: DEBUG oslo_service.backend.eventlet.service [None 
req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] workarounds.disable_fallback_pcpu_query = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.274765] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] workarounds.disable_group_policy_check_upcall = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.274899] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.275071] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] workarounds.disable_rootwrap = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.275234] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] workarounds.enable_numa_live_migration = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.275395] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.275552] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.275713] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] workarounds.handle_virt_lifecycle_events = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.275872] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] workarounds.libvirt_disable_apic = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.276040] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] workarounds.never_download_image_if_on_rbd = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.276207] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.276366] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.276528] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=69992) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.276686] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.276850] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.277167] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.277167] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.277327] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.277488] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.277694] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.277843] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] wsgi.client_socket_timeout = 900 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.278015] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] wsgi.default_pool_size = 1000 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.278187] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] wsgi.keep_alive = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.278353] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] wsgi.max_header_line = 16384 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.278525] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] wsgi.secure_proxy_ssl_header = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.278713] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] wsgi.ssl_ca_file = None 
{{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.278878] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] wsgi.ssl_cert_file = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.279052] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] wsgi.ssl_key_file = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.279223] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] wsgi.tcp_keepidle = 600 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.279403] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.279576] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] zvm.ca_file = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.279735] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] zvm.cloud_connector_url = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.280040] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.280219] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] zvm.reachable_timeout = 300 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.280395] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.280575] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.280753] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] profiler.connection_string = messaging:// {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.280919] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] profiler.enabled = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.281100] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] 
profiler.es_doc_type = notification {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.281267] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] profiler.es_scroll_size = 10000 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.281437] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] profiler.es_scroll_time = 2m {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.281599] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] profiler.filter_error_trace = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.281770] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] profiler.hmac_keys = **** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.281936] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] profiler.sentinel_service_name = mymaster {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.282115] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] profiler.socket_timeout = 0.1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.282281] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] profiler.trace_requests = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.282473] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] profiler.trace_sqlalchemy = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.282709] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] profiler_jaeger.process_tags = {} {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.282890] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] profiler_jaeger.service_name_prefix = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.283063] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] profiler_otlp.service_name_prefix = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.283234] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] remote_debug.host = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.283394] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] remote_debug.port = None {{(pid=69992) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.283571] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.283746] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.283913] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.284090] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.284259] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.284424] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.284588] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.284751] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.284921] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.285107] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.hostname = devstack {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.285269] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.285443] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.285648] env[69992]: DEBUG oslo_service.backend.eventlet.service [None 
req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.285842] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.286012] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.286194] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.286357] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.286521] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.286693] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.286889] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.287029] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.287201] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.287367] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.287530] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.287693] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=69992) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.287855] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.288026] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.288194] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.288354] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.288516] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.288679] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.ssl = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.288856] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.289032] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.289200] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.289371] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.289541] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.ssl_version = {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.289704] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.289890] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None 
None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.290068] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_notifications.retry = -1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.290249] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.290453] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_messaging_notifications.transport_url = **** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.290640] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.auth_section = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.290806] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.auth_type = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.290967] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.cafile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.291143] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.certfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.291307] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.collect_timing = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.291467] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.connect_retries = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.291627] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.connect_retry_delay = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.291783] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.endpoint_id = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.291954] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.endpoint_interface = publicURL {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.292128] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.endpoint_override = 
None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.292289] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.endpoint_region_name = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.292472] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.endpoint_service_name = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.292622] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.endpoint_service_type = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.292788] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.insecure = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.292945] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.keyfile = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.293114] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.max_version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.293272] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.min_version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.293447] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.region_name = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.293597] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.retriable_status_codes = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.293749] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.service_name = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.293902] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.service_type = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.294072] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.split_loggers = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.294232] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.status_code_retries = None {{(pid=69992) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.294388] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.status_code_retry_delay = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.294570] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.timeout = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.294774] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.valid_interfaces = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.294898] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_limit.version = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.295077] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_reports.file_event_handler = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.295247] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_reports.file_event_handler_interval = 1 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.295408] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] oslo_reports.log_dir = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.295580] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.295742] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vif_plug_linux_bridge_privileged.group = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.295898] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.296076] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.296244] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.296401] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vif_plug_linux_bridge_privileged.user 
= None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.296594] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.296747] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vif_plug_ovs_privileged.group = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.296905] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vif_plug_ovs_privileged.helper_command = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.297082] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.297248] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.297407] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] vif_plug_ovs_privileged.user = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.297579] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] os_vif_linux_bridge.flat_interface = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.297762] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.297938] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.298124] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.298300] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.298473] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.298641] env[69992]: DEBUG oslo_service.backend.eventlet.service [None 
req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.298808] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] os_vif_linux_bridge.vlan_interface = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.298987] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.299197] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] os_vif_ovs.isolate_vif = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.299371] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.299542] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.299721] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.299892] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] os_vif_ovs.ovsdb_interface = native {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.300066] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] os_vif_ovs.per_port_bridge = False {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.300244] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] privsep_osbrick.capabilities = [21] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.300446] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] privsep_osbrick.group = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.300593] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] privsep_osbrick.helper_command = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.300763] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.300932] env[69992]: DEBUG oslo_service.backend.eventlet.service [None 
req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] privsep_osbrick.thread_pool_size = 8 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.301105] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] privsep_osbrick.user = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.301281] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.301443] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] nova_sys_admin.group = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.301605] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] nova_sys_admin.helper_command = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.301772] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.301934] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] nova_sys_admin.thread_pool_size = 8 {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.302101] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] nova_sys_admin.user = None {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 791.302236] env[69992]: DEBUG oslo_service.backend.eventlet.service [None req-c71f34ec-540c-456d-8eaf-85aed0ca12fe None None] ******************************************************************************** {{(pid=69992) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 791.302700] env[69992]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 791.807179] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Getting list of instances from cluster (obj){ [ 791.807179] env[69992]: value = "domain-c8" [ 791.807179] env[69992]: _type = "ClusterComputeResource" [ 791.807179] env[69992]: } {{(pid=69992) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 791.808242] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce83a778-f375-429b-8945-04a5571ad93f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.817277] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Got total of 0 instances {{(pid=69992) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 791.817860] env[69992]: WARNING nova.virt.vmwareapi.driver [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear 
maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 791.818337] env[69992]: INFO nova.virt.node [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Generated node identity 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 [ 791.818583] env[69992]: INFO nova.virt.node [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Wrote node identity 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 to /opt/stack/data/n-cpu-1/compute_id [ 792.321496] env[69992]: WARNING nova.compute.manager [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Compute nodes ['9dc5dd7f-a3af-48a9-a04e-f6c1d333da28'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 793.326639] env[69992]: INFO nova.compute.manager [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 794.332175] env[69992]: WARNING nova.compute.manager [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 794.332561] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.332719] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.332886] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.333051] env[69992]: DEBUG nova.compute.resource_tracker [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69992) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 794.333993] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9069baa-7aab-4a33-b7cf-cb4cee7f35bd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.341941] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef566cf1-023e-4410-b0cc-0a226d527bde {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.355164] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1632869-3c18-433e-a1f4-bbc92d61c27b {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.361321] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2405490-3e76-43e6-9e04-a6df2e0b7eb8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.389325] env[69992]: DEBUG nova.compute.resource_tracker [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181139MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=69992) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 794.389459] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.389660] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.891985] env[69992]: WARNING nova.compute.resource_tracker [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] No compute node record for cpu-1:9dc5dd7f-a3af-48a9-a04e-f6c1d333da28: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 could not be found. [ 795.396369] env[69992]: INFO nova.compute.resource_tracker [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 [ 796.904647] env[69992]: DEBUG nova.compute.resource_tracker [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 796.905076] env[69992]: DEBUG nova.compute.resource_tracker [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 797.058758] env[69992]: INFO nova.scheduler.client.report [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] [req-88756cd6-b567-4e2b-ae69-ed29c427461f] Created resource provider record via placement API for resource provider with UUID 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 797.074871] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d85161-1c60-4f5d-b09e-170d6a0bc7f0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.082548] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b45ca7ea-c10e-49d1-8c2b-7a458ae74e1e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.111943] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7042435-462b-4aef-aa40-1264ff2dae9c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.119156] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a05e9ce2-4d12-4e76-b190-ee42ede19f77 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.131974] env[69992]: DEBUG nova.compute.provider_tree [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 797.691433] env[69992]: DEBUG nova.scheduler.client.report [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 797.691669] env[69992]: DEBUG nova.compute.provider_tree [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 0 to 1 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 797.691831] env[69992]: DEBUG nova.compute.provider_tree [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 797.762636] env[69992]: DEBUG nova.compute.provider_tree [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Updating 
resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 1 to 2 during operation: update_traits {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 798.267421] env[69992]: DEBUG nova.compute.resource_tracker [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69992) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 798.267690] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.878s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.267792] env[69992]: DEBUG nova.service [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Creating RPC server for service compute {{(pid=69992) start /opt/stack/nova/nova/service.py:186}} [ 798.283599] env[69992]: DEBUG nova.service [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] Join ServiceGroup membership for this service compute {{(pid=69992) start /opt/stack/nova/nova/service.py:203}} [ 798.283784] env[69992]: DEBUG nova.servicegroup.drivers.db [None req-e68c0e67-6bf6-40fc-a4cf-e15963838548 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=69992) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 828.286279] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._sync_power_states {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 828.788705] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Getting list of instances from cluster (obj){ [ 828.788705] env[69992]: value = "domain-c8" [ 828.788705] env[69992]: _type = "ClusterComputeResource" [ 828.788705] env[69992]: } {{(pid=69992) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 828.789914] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-064c43f5-3061-4b06-808b-61754d3aa3ed {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.802167] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Got total of 0 instances {{(pid=69992) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 828.802416] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 828.802739] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Getting list of instances from cluster (obj){ [ 828.802739] env[69992]: value = "domain-c8" [ 828.802739] env[69992]: _type = "ClusterComputeResource" [ 828.802739] env[69992]: } {{(pid=69992) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 828.807636] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59d5a362-636d-4b19-875c-fac504c7ed93 
{{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.818904] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Got total of 0 instances {{(pid=69992) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 833.240799] env[69992]: DEBUG oslo_concurrency.lockutils [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Acquiring lock "64ab568c-a2ef-4bac-8885-3dde76f9f764" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 833.241079] env[69992]: DEBUG oslo_concurrency.lockutils [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Lock "64ab568c-a2ef-4bac-8885-3dde76f9f764" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 833.751069] env[69992]: DEBUG nova.compute.manager [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 834.297295] env[69992]: DEBUG oslo_concurrency.lockutils [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.297587] env[69992]: DEBUG oslo_concurrency.lockutils [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.300719] env[69992]: INFO nova.compute.claims [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 835.371850] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44fdb590-bd85-4196-8490-a05756af58b6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.382460] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2199a249-15b1-400e-b14c-f579ee8dee31 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.443928] env[69992]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec95687-7371-4a04-be6a-dc3f23b5de62 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.453937] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7fa78db-4c51-4f79-8fd2-78cc3aeab7ff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.470230] env[69992]: DEBUG nova.compute.provider_tree [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.975523] env[69992]: DEBUG nova.scheduler.client.report [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 836.204335] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Acquiring lock "fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.204802] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Lock "fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.487453] env[69992]: DEBUG oslo_concurrency.lockutils [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.187s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.487453] env[69992]: DEBUG nova.compute.manager [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 836.708532] env[69992]: DEBUG nova.compute.manager [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 836.994174] env[69992]: DEBUG nova.compute.utils [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 836.994174] env[69992]: DEBUG nova.compute.manager [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 836.994174] env[69992]: DEBUG nova.network.neutron [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 837.235663] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 837.235932] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.237550] env[69992]: INFO nova.compute.claims [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 837.500080] env[69992]: DEBUG nova.compute.manager [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 838.222505] env[69992]: DEBUG nova.policy [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1cf3696882f641b18e527819e746fca7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23727c76d64d4449820b8f861230275e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 838.297080] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8a64a1-5e20-4d8b-8b33-7b3fb5f2a8ce {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.306459] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0606ffc5-9c91-4fb5-bd77-b07c38c68aeb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.342461] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9430c53-0d79-42ce-bdad-7292794fdb86 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.350919] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f52ff0-383f-4ab4-9869-36b02cf78288 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.366358] env[69992]: DEBUG nova.compute.provider_tree [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.515048] env[69992]: DEBUG nova.compute.manager [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 838.552494] env[69992]: DEBUG nova.virt.hardware [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 838.552737] env[69992]: DEBUG nova.virt.hardware [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 838.552888] env[69992]: DEBUG nova.virt.hardware [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 838.555163] env[69992]: DEBUG nova.virt.hardware [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 838.555398] env[69992]: DEBUG nova.virt.hardware [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 838.555566] env[69992]: DEBUG nova.virt.hardware [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 838.555857] env[69992]: DEBUG nova.virt.hardware [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 838.556065] env[69992]: DEBUG nova.virt.hardware [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 
tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 838.557998] env[69992]: DEBUG nova.virt.hardware [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 838.557998] env[69992]: DEBUG nova.virt.hardware [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 838.557998] env[69992]: DEBUG nova.virt.hardware [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 838.558620] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb28aa93-3518-4dc8-afcb-e2260b90a550 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.568795] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-590da5dd-ca4e-4c25-99b9-86ab51f819aa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.591960] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ceb874b-c1ee-4381-a1e6-05f16a183f63 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.873095] env[69992]: DEBUG nova.scheduler.client.report [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 838.887814] env[69992]: DEBUG nova.network.neutron [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Successfully created port: bb164768-c900-42bd-819e-eb523bfc2d54 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 839.249387] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Acquiring lock 
"e74441fc-361f-4e0b-bfdd-6f8213db51e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.249620] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Lock "e74441fc-361f-4e0b-bfdd-6f8213db51e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 839.379848] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.143s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.380601] env[69992]: DEBUG nova.compute.manager [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 839.635640] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Acquiring lock "f249c0b9-ddd7-4b63-ae3a-11035764d3e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.635640] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Lock "f249c0b9-ddd7-4b63-ae3a-11035764d3e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 839.751803] env[69992]: DEBUG nova.compute.manager [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 839.887825] env[69992]: DEBUG nova.compute.utils [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 839.887825] env[69992]: DEBUG nova.compute.manager [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 839.889139] env[69992]: DEBUG nova.network.neutron [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 839.975304] env[69992]: DEBUG nova.policy [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8f2d51da9ad3453f85330a9453d5b79f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fbe8c17ad0154c55a053d464c46a4857', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 840.140878] env[69992]: DEBUG nova.compute.manager [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 840.285219] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.285219] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 840.285219] env[69992]: INFO nova.compute.claims [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 840.394989] env[69992]: DEBUG nova.compute.manager [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 840.673197] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.970875] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Acquiring lock "93b78a8b-389c-4114-8c1d-da80146d80f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.971130] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Lock "93b78a8b-389c-4114-8c1d-da80146d80f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.082589] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Acquiring lock "e934fc79-f7c5-4ca9-9f81-85467c1e9b45" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.082589] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Lock "e934fc79-f7c5-4ca9-9f81-85467c1e9b45" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.294423] env[69992]: DEBUG nova.network.neutron [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Successfully created port: c01a5abb-0c56-4377-ab40-619062fc6092 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 841.412143] env[69992]: DEBUG nova.compute.manager [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 841.437851] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-694e60e8-487d-4c2b-b172-f40059a979c2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.449900] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329cd649-0594-41fe-bec1-71a30fa10d3f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.456699] env[69992]: DEBUG nova.virt.hardware [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 841.456831] env[69992]: DEBUG nova.virt.hardware [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 841.457255] env[69992]: DEBUG nova.virt.hardware [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 841.457255] env[69992]: DEBUG nova.virt.hardware [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 841.457255] env[69992]: DEBUG nova.virt.hardware [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 841.457417] env[69992]: DEBUG nova.virt.hardware [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 841.457637] env[69992]: DEBUG nova.virt.hardware [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 
tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 841.458093] env[69992]: DEBUG nova.virt.hardware [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 841.458093] env[69992]: DEBUG nova.virt.hardware [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 841.458197] env[69992]: DEBUG nova.virt.hardware [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 841.458300] env[69992]: DEBUG nova.virt.hardware [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 841.459071] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ffab92-5f71-4576-a1f0-81f6fe40e62a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.491812] env[69992]: DEBUG nova.compute.manager [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 841.499370] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc94e5c-c8e9-47e9-933a-e5b43ac27f9f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.506994] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d12709f-49d7-4765-aa3d-bd5469ccdc3e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.513069] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8994b29f-cdc2-41ad-a807-ea3f000be494 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.528785] env[69992]: DEBUG nova.compute.provider_tree [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 841.586345] env[69992]: DEBUG nova.compute.manager [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 842.020894] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.036949] env[69992]: DEBUG nova.scheduler.client.report [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 842.107899] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.364575] env[69992]: DEBUG nova.network.neutron [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Successfully updated port: 
bb164768-c900-42bd-819e-eb523bfc2d54 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 842.542718] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.262s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.543662] env[69992]: DEBUG nova.compute.manager [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 842.548089] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.878s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.549997] env[69992]: INFO nova.compute.claims [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 842.869045] env[69992]: DEBUG oslo_concurrency.lockutils [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Acquiring lock "refresh_cache-64ab568c-a2ef-4bac-8885-3dde76f9f764" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.872377] env[69992]: DEBUG oslo_concurrency.lockutils [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Acquired lock "refresh_cache-64ab568c-a2ef-4bac-8885-3dde76f9f764" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.872508] env[69992]: DEBUG nova.network.neutron [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 843.057173] env[69992]: DEBUG nova.compute.utils [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 843.058488] env[69992]: DEBUG nova.compute.manager [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 843.058664] env[69992]: DEBUG nova.network.neutron [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 843.193621] env[69992]: DEBUG nova.policy [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c482cf0b48b849538dd7a47fea8a6bb3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '416f0d4ae65b473a98668257b289250e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 843.248942] env[69992]: DEBUG nova.compute.manager [req-ee3c5a91-2734-4781-bf19-65ab322ed261 req-dcfc2767-f8c1-4713-b655-8ce2388a510c service nova] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Received event network-vif-plugged-bb164768-c900-42bd-819e-eb523bfc2d54 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 843.248942] env[69992]: DEBUG oslo_concurrency.lockutils [req-ee3c5a91-2734-4781-bf19-65ab322ed261 req-dcfc2767-f8c1-4713-b655-8ce2388a510c service nova] Acquiring lock "64ab568c-a2ef-4bac-8885-3dde76f9f764-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.249789] env[69992]: DEBUG oslo_concurrency.lockutils [req-ee3c5a91-2734-4781-bf19-65ab322ed261 req-dcfc2767-f8c1-4713-b655-8ce2388a510c service nova] Lock "64ab568c-a2ef-4bac-8885-3dde76f9f764-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 843.250377] env[69992]: DEBUG oslo_concurrency.lockutils [req-ee3c5a91-2734-4781-bf19-65ab322ed261 req-dcfc2767-f8c1-4713-b655-8ce2388a510c service nova] Lock "64ab568c-a2ef-4bac-8885-3dde76f9f764-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.252035] env[69992]: DEBUG nova.compute.manager [req-ee3c5a91-2734-4781-bf19-65ab322ed261 req-dcfc2767-f8c1-4713-b655-8ce2388a510c service nova] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] No waiting events found dispatching network-vif-plugged-bb164768-c900-42bd-819e-eb523bfc2d54 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 843.252035] env[69992]: WARNING nova.compute.manager [req-ee3c5a91-2734-4781-bf19-65ab322ed261 req-dcfc2767-f8c1-4713-b655-8ce2388a510c service nova] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Received unexpected event network-vif-plugged-bb164768-c900-42bd-819e-eb523bfc2d54 for instance with vm_state building and task_state spawning. 
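The "Acquiring lock ... by ...", "acquired ... waited 0.000s" and "released ... held ..." entries around the instance claims above are emitted by oslo.concurrency's lock wrapper (the inner() lines in lockutils.py), not by Nova's own code. A minimal sketch of how a decorated function produces exactly those DEBUG lines, with a hypothetical do_instance_claim() standing in for ResourceTracker.instance_claim:

    # Minimal sketch, assuming only oslo.concurrency; the function name and
    # argument are illustrative, not taken from Nova.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def do_instance_claim(instance_uuid):
        # Runs with the named lock held; the synchronized() wrapper logs
        # "Acquiring lock", "acquired ... waited Ns" and "released ... held Ns".
        return 'claimed %s' % instance_uuid

    do_instance_claim('64ab568c-a2ef-4bac-8885-3dde76f9f764')

The waited/held durations in the log (e.g. "held 2.143s" for the claim) are measured by that same wrapper, so long hold times point at the body of the decorated call rather than at lock contention.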
[ 843.506062] env[69992]: DEBUG nova.network.neutron [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 843.564996] env[69992]: DEBUG nova.compute.manager [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 843.702918] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b181343-894c-444b-a11b-9a2980cbb5af {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.717381] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d8faff6-00b0-4620-8b18-f0c26f9dbcae {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.754068] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603891d8-d218-43a5-9e4c-df9ab0171d2f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.762340] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e00ada8-427a-47e1-b01c-9ed06ba4cead {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.781532] env[69992]: DEBUG nova.compute.provider_tree [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 844.286178] env[69992]: DEBUG nova.scheduler.client.report [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 844.382610] env[69992]: DEBUG nova.network.neutron [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Updating instance_info_cache with network_info: [{"id": "bb164768-c900-42bd-819e-eb523bfc2d54", "address": "fa:16:3e:8d:66:f4", "network": {"id": "023d028d-4d61-4461-91bb-ebf0da99219b", "bridge": "br-int", "label": 
"tempest-FloatingIPsAssociationNegativeTestJSON-413392921-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23727c76d64d4449820b8f861230275e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62d6a386-ffdb-4232-83f3-cb21c5e59e85", "external-id": "nsx-vlan-transportzone-950", "segmentation_id": 950, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb164768-c9", "ovs_interfaceid": "bb164768-c900-42bd-819e-eb523bfc2d54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.454460] env[69992]: DEBUG nova.network.neutron [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Successfully created port: 0adb34fa-f52f-4b4d-983b-afa1a04f5624 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 844.575751] env[69992]: DEBUG nova.compute.manager [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 844.612020] env[69992]: DEBUG nova.virt.hardware [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 844.612281] env[69992]: DEBUG nova.virt.hardware [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 844.612441] env[69992]: DEBUG nova.virt.hardware [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 844.612653] env[69992]: DEBUG nova.virt.hardware [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 844.612805] env[69992]: DEBUG nova.virt.hardware [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 844.612947] env[69992]: DEBUG nova.virt.hardware [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 844.614238] env[69992]: DEBUG nova.virt.hardware [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 844.614645] env[69992]: DEBUG nova.virt.hardware [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 844.614881] env[69992]: DEBUG nova.virt.hardware [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 844.615067] env[69992]: DEBUG nova.virt.hardware [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 844.615244] env[69992]: DEBUG nova.virt.hardware [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 844.616182] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e9aa2d-4f49-4555-89a4-4f709abc29f7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.631326] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84c32d5c-5501-468a-9a40-987c7693cae2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.784038] env[69992]: DEBUG nova.network.neutron [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Successfully updated port: c01a5abb-0c56-4377-ab40-619062fc6092 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 844.792861] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.244s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.792861] env[69992]: DEBUG nova.compute.manager [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 844.795794] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.775s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.801278] env[69992]: INFO nova.compute.claims [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 844.886204] env[69992]: DEBUG oslo_concurrency.lockutils [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Releasing lock "refresh_cache-64ab568c-a2ef-4bac-8885-3dde76f9f764" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 844.886204] env[69992]: DEBUG nova.compute.manager [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Instance network_info: |[{"id": "bb164768-c900-42bd-819e-eb523bfc2d54", "address": "fa:16:3e:8d:66:f4", "network": {"id": "023d028d-4d61-4461-91bb-ebf0da99219b", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-413392921-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23727c76d64d4449820b8f861230275e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62d6a386-ffdb-4232-83f3-cb21c5e59e85", "external-id": "nsx-vlan-transportzone-950", "segmentation_id": 950, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb164768-c9", "ovs_interfaceid": "bb164768-c900-42bd-819e-eb523bfc2d54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 844.886601] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:66:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62d6a386-ffdb-4232-83f3-cb21c5e59e85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bb164768-c900-42bd-819e-eb523bfc2d54', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 844.903823] env[69992]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 844.905675] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-245ee4e9-81fd-40bd-80b5-4bf528c2abb7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.924931] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Created folder: OpenStack in parent group-v4. [ 844.924931] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Creating folder: Project (23727c76d64d4449820b8f861230275e). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 844.924931] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-754316e8-0c8a-49ef-938f-6cfd56861495 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.934322] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Created folder: Project (23727c76d64d4449820b8f861230275e) in parent group-v581821. [ 844.934587] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Creating folder: Instances. Parent ref: group-v581822. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 844.934844] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-338efaed-36e9-44b8-987d-e37d511a42c0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.951389] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Created folder: Instances in parent group-v581822. [ 844.951659] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 844.951863] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 844.952098] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e96466b2-d6cd-43dd-b354-ef52398ac857 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.979991] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 844.979991] env[69992]: value = "task-2896546" [ 844.979991] env[69992]: _type = "Task" [ 844.979991] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.992232] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896546, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.289289] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Acquiring lock "refresh_cache-fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.289289] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Acquired lock "refresh_cache-fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.291195] env[69992]: DEBUG nova.network.neutron [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 845.305158] env[69992]: DEBUG nova.compute.utils [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 845.315126] env[69992]: DEBUG nova.compute.manager [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Not allocating networking since 'none' was specified. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 845.492480] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896546, 'name': CreateVM_Task, 'duration_secs': 0.34573} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.492719] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 845.509973] env[69992]: DEBUG oslo_vmware.service [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06b0fda1-5c79-42f4-9d8b-48a9c5314b06 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.524762] env[69992]: DEBUG oslo_concurrency.lockutils [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.525044] env[69992]: DEBUG oslo_concurrency.lockutils [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.525876] env[69992]: DEBUG oslo_concurrency.lockutils [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 845.527888] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3248ec7-9c85-4831-bdab-ceb298032bc9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.534395] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Waiting for the task: (returnval){ [ 845.534395] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52803d7b-0bf3-e137-51af-cd564cd28462" [ 845.534395] env[69992]: _type = "Task" [ 845.534395] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.546470] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52803d7b-0bf3-e137-51af-cd564cd28462, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.812610] env[69992]: DEBUG nova.compute.manager [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 845.857922] env[69992]: DEBUG nova.network.neutron [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 845.972324] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f331c2-0f3f-46ad-aa4e-b713543a06ee {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.981502] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f8c72c-f794-4907-bd69-e05e3a33aa00 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.026305] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef146fa9-e93f-4482-bbc9-a70d8bc9c984 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.032459] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc845ee-eb79-4129-b93f-5e9f7b628f55 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.057467] env[69992]: DEBUG nova.compute.provider_tree [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.059864] env[69992]: DEBUG oslo_concurrency.lockutils [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.059864] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 846.059864] env[69992]: DEBUG oslo_concurrency.lockutils [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Acquiring lock "[datastore2] 
devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.060047] env[69992]: DEBUG oslo_concurrency.lockutils [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.061517] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 846.061517] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-311e873f-966c-4b55-8e70-693f1d921181 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.079232] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 846.080656] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 846.080773] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a317449-3fa5-44ca-8bb1-a0d999ff1b9b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.088686] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-927847d3-5511-45da-8150-073ba370a26f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.093850] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Waiting for the task: (returnval){ [ 846.093850] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52080dc8-42c3-f305-6bf4-4a06cf16742f" [ 846.093850] env[69992]: _type = "Task" [ 846.093850] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.105034] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52080dc8-42c3-f305-6bf4-4a06cf16742f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.166304] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Acquiring lock "068507bb-ee7a-44f7-b315-7d4b2b70e735" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.166552] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Lock "068507bb-ee7a-44f7-b315-7d4b2b70e735" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.243727] env[69992]: DEBUG nova.network.neutron [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Updating instance_info_cache with network_info: [{"id": "c01a5abb-0c56-4377-ab40-619062fc6092", "address": "fa:16:3e:bb:32:3c", "network": {"id": "1f29edc7-882e-4623-86e7-3d4c6cd47bdd", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1063426455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fbe8c17ad0154c55a053d464c46a4857", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92233552-2c0c-416e-9bf3-bfcca8eda2dc", "external-id": "nsx-vlan-transportzone-251", "segmentation_id": 251, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc01a5abb-0c", "ovs_interfaceid": "c01a5abb-0c56-4377-ab40-619062fc6092", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.562844] env[69992]: DEBUG nova.scheduler.client.report [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 846.607725] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Preparing fetch location {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 846.608009] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Creating directory with path [datastore2] vmware_temp/c6a81a14-e760-4b72-b3b3-5668f05e891e/eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 846.608290] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d475d632-dd34-4191-bcd7-c1fa3d8ec0a9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.625913] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.628614] env[69992]: DEBUG nova.network.neutron [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Successfully updated port: 0adb34fa-f52f-4b4d-983b-afa1a04f5624 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 846.629833] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.630284] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.630502] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.630724] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.630926] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69992) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.631147] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.633242] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69992) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 846.635109] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.638831] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Created directory with path [datastore2] vmware_temp/c6a81a14-e760-4b72-b3b3-5668f05e891e/eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 846.638831] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Fetch image to [datastore2] vmware_temp/c6a81a14-e760-4b72-b3b3-5668f05e891e/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/tmp-sparse.vmdk {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 846.638831] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Downloading image file data eb50549f-9db8-4c15-a738-0e4b1e9e33fb to [datastore2] vmware_temp/c6a81a14-e760-4b72-b3b3-5668f05e891e/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/tmp-sparse.vmdk on the data store datastore2 {{(pid=69992) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 846.642860] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89fd0d61-6a93-4885-87b5-63076bb71f0b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.658284] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee2bd869-63c5-4d2b-b75c-d1a3f69c4859 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.672397] env[69992]: DEBUG nova.compute.manager [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 846.676809] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-692a6395-6aec-45ab-a506-743078accb90 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.721021] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833ef87b-79c7-480c-943e-b2733506ebfa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.729270] env[69992]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-119d9011-10c3-43fd-a1b7-37aa9871d75d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.749623] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Releasing lock "refresh_cache-fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.750093] env[69992]: DEBUG nova.compute.manager [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Instance network_info: |[{"id": "c01a5abb-0c56-4377-ab40-619062fc6092", "address": "fa:16:3e:bb:32:3c", "network": {"id": "1f29edc7-882e-4623-86e7-3d4c6cd47bdd", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1063426455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fbe8c17ad0154c55a053d464c46a4857", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92233552-2c0c-416e-9bf3-bfcca8eda2dc", "external-id": "nsx-vlan-transportzone-251", "segmentation_id": 251, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc01a5abb-0c", "ovs_interfaceid": "c01a5abb-0c56-4377-ab40-619062fc6092", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 846.751190] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:32:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92233552-2c0c-416e-9bf3-bfcca8eda2dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c01a5abb-0c56-4377-ab40-619062fc6092', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 846.762921] env[69992]: 
DEBUG nova.virt.vmwareapi.vm_util [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Creating folder: Project (fbe8c17ad0154c55a053d464c46a4857). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 846.765946] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d392011-77c2-4f9e-af4f-03758c684b99 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.768234] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Downloading image file data eb50549f-9db8-4c15-a738-0e4b1e9e33fb to the data store datastore2 {{(pid=69992) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 846.781956] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Created folder: Project (fbe8c17ad0154c55a053d464c46a4857) in parent group-v581821. [ 846.781956] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Creating folder: Instances. Parent ref: group-v581825. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 846.781956] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-86565f8e-7aea-4451-9b43-23027c1b6996 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.790771] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Created folder: Instances in parent group-v581825. [ 846.790983] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 846.793011] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 846.794519] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d0b546a7-0621-4baa-8267-9b8fd3c38f73 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.816700] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 846.816700] env[69992]: value = "task-2896549" [ 846.816700] env[69992]: _type = "Task" [ 846.816700] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.826102] env[69992]: DEBUG nova.compute.manager [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 846.828333] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896549, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.876866] env[69992]: DEBUG oslo_vmware.rw_handles [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c6a81a14-e760-4b72-b3b3-5668f05e891e/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69992) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 847.068925] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.273s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.069205] env[69992]: DEBUG nova.compute.manager [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 847.072680] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.965s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.077458] env[69992]: INFO nova.compute.claims [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 847.100235] env[69992]: DEBUG nova.compute.manager [req-77d56ede-05b1-4c27-9717-956ee723b673 req-0685f57d-b40d-463a-bb0a-1320b46268fb service nova] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Received event network-changed-bb164768-c900-42bd-819e-eb523bfc2d54 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 847.100560] env[69992]: DEBUG nova.compute.manager [req-77d56ede-05b1-4c27-9717-956ee723b673 req-0685f57d-b40d-463a-bb0a-1320b46268fb service nova] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Refreshing instance network info cache due to event network-changed-bb164768-c900-42bd-819e-eb523bfc2d54. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 847.100852] env[69992]: DEBUG oslo_concurrency.lockutils [req-77d56ede-05b1-4c27-9717-956ee723b673 req-0685f57d-b40d-463a-bb0a-1320b46268fb service nova] Acquiring lock "refresh_cache-64ab568c-a2ef-4bac-8885-3dde76f9f764" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.101025] env[69992]: DEBUG oslo_concurrency.lockutils [req-77d56ede-05b1-4c27-9717-956ee723b673 req-0685f57d-b40d-463a-bb0a-1320b46268fb service nova] Acquired lock "refresh_cache-64ab568c-a2ef-4bac-8885-3dde76f9f764" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.101227] env[69992]: DEBUG nova.network.neutron [req-77d56ede-05b1-4c27-9717-956ee723b673 req-0685f57d-b40d-463a-bb0a-1320b46268fb service nova] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Refreshing network info cache for port bb164768-c900-42bd-819e-eb523bfc2d54 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 847.131698] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Acquiring lock "refresh_cache-e74441fc-361f-4e0b-bfdd-6f8213db51e3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.131843] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Acquired lock "refresh_cache-e74441fc-361f-4e0b-bfdd-6f8213db51e3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.132104] env[69992]: DEBUG nova.network.neutron [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 847.140992] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.200641] env[69992]: DEBUG nova.virt.hardware [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 847.201229] env[69992]: DEBUG nova.virt.hardware [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 847.201229] env[69992]: DEBUG nova.virt.hardware [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 847.201415] env[69992]: DEBUG nova.virt.hardware [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 847.201728] env[69992]: DEBUG nova.virt.hardware [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 847.201728] env[69992]: DEBUG nova.virt.hardware [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 847.202070] env[69992]: DEBUG nova.virt.hardware [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 847.202263] env[69992]: DEBUG nova.virt.hardware [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 847.202471] env[69992]: DEBUG nova.virt.hardware [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 847.202667] env[69992]: DEBUG nova.virt.hardware [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:579}} [ 847.202880] env[69992]: DEBUG nova.virt.hardware [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 847.204790] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4295eee-c60d-4cd3-8246-c785738b542a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.214178] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.220331] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a89dc13-7b45-40d4-9c80-99b7bea1a023 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.240032] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Instance VIF info [] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 847.246112] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Creating folder: Project (eb63b5f065f845389fb9bf6727a9da64). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 847.251256] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cddef6e3-20ed-44d1-b083-b4e4ba3e53bb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.266354] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Created folder: Project (eb63b5f065f845389fb9bf6727a9da64) in parent group-v581821. [ 847.266354] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Creating folder: Instances. Parent ref: group-v581828. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 847.268248] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-027f0ae7-ecc1-4192-9363-b076b3c4f30a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.278707] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Created folder: Instances in parent group-v581828. 
[ 847.279271] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 847.281291] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 847.281582] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f8e253c-357e-4548-a510-67f59cabdf6b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.303414] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 847.303414] env[69992]: value = "task-2896552" [ 847.303414] env[69992]: _type = "Task" [ 847.303414] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.313691] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896552, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.328269] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896549, 'name': CreateVM_Task, 'duration_secs': 0.368003} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.328397] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 847.329110] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.329312] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.329660] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 847.329915] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83415ffc-8029-4b61-a48a-c8c44e385440 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.335830] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa 
tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Waiting for the task: (returnval){ [ 847.335830] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52149da9-ffab-d016-037a-9628ea3780d2" [ 847.335830] env[69992]: _type = "Task" [ 847.335830] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.345648] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52149da9-ffab-d016-037a-9628ea3780d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.585033] env[69992]: DEBUG nova.compute.utils [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 847.592536] env[69992]: DEBUG nova.compute.manager [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 847.592801] env[69992]: DEBUG nova.network.neutron [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 847.625960] env[69992]: DEBUG oslo_vmware.rw_handles [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Completed reading data from the image iterator. {{(pid=69992) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 847.626177] env[69992]: DEBUG oslo_vmware.rw_handles [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c6a81a14-e760-4b72-b3b3-5668f05e891e/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 847.758978] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Downloaded image file data eb50549f-9db8-4c15-a738-0e4b1e9e33fb to vmware_temp/c6a81a14-e760-4b72-b3b3-5668f05e891e/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/tmp-sparse.vmdk on the data store datastore2 {{(pid=69992) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 847.760669] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Caching image {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 847.760903] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Copying Virtual Disk [datastore2] vmware_temp/c6a81a14-e760-4b72-b3b3-5668f05e891e/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/tmp-sparse.vmdk to [datastore2] vmware_temp/c6a81a14-e760-4b72-b3b3-5668f05e891e/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 847.761187] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a0bffb1a-2dd4-4089-a128-0177b301eceb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.775288] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Waiting for the task: (returnval){ [ 847.775288] env[69992]: value = "task-2896553" [ 847.775288] env[69992]: _type = "Task" [ 847.775288] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.788089] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896553, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.812952] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896552, 'name': CreateVM_Task, 'duration_secs': 0.312955} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.815285] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 847.815769] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.849342] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.849631] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 847.849870] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.850101] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.850411] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 847.850665] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86211bca-10fb-42ee-aab7-4e4c92d5a4db {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.856139] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Waiting for the task: (returnval){ [ 847.856139] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52327eb9-ac79-4d82-b777-356c50812372" [ 847.856139] env[69992]: _type = "Task" [ 847.856139] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.866797] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52327eb9-ac79-4d82-b777-356c50812372, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.104778] env[69992]: DEBUG nova.compute.manager [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 848.164256] env[69992]: DEBUG nova.network.neutron [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 848.274489] env[69992]: DEBUG nova.policy [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c76ec6aac19f4e74943b20dd5e3224a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '46a2d84b932449edb69966a5884990d9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 848.289922] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896553, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.317596] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44b8eb9d-c677-4f2d-85b8-b8f48435d4b5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.332412] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c495700-30c6-475a-ba1a-22f4f7e2305b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.391842] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c6212a2-9375-4efb-85f6-4c6cd4bcff67 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.407114] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.407371] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 848.407584] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.409204] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4172ed9-58a7-4478-8407-d060da031a29 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.433741] env[69992]: DEBUG nova.compute.provider_tree [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 848.798823] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896553, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.707537} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.798823] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Copied Virtual Disk [datastore2] vmware_temp/c6a81a14-e760-4b72-b3b3-5668f05e891e/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/tmp-sparse.vmdk to [datastore2] vmware_temp/c6a81a14-e760-4b72-b3b3-5668f05e891e/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 848.798823] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Deleting the datastore file [datastore2] vmware_temp/c6a81a14-e760-4b72-b3b3-5668f05e891e/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/tmp-sparse.vmdk {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 848.798823] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6053d76d-3647-4828-9706-6d7d234f38e8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.799844] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Waiting for the task: (returnval){ [ 848.799844] env[69992]: value = "task-2896554" [ 848.799844] env[69992]: _type = "Task" [ 848.799844] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.813393] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896554, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.943803] env[69992]: DEBUG nova.scheduler.client.report [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 849.122079] env[69992]: DEBUG nova.compute.manager [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 849.173144] env[69992]: DEBUG nova.network.neutron [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Updating instance_info_cache with network_info: [{"id": "0adb34fa-f52f-4b4d-983b-afa1a04f5624", "address": "fa:16:3e:b7:6e:e5", "network": {"id": "ad60b844-dad4-4331-94a2-e2218409c6ba", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-77980625-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "416f0d4ae65b473a98668257b289250e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0adb34fa-f5", "ovs_interfaceid": "0adb34fa-f52f-4b4d-983b-afa1a04f5624", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.303922] env[69992]: DEBUG nova.network.neutron [req-77d56ede-05b1-4c27-9717-956ee723b673 req-0685f57d-b40d-463a-bb0a-1320b46268fb service nova] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Updated VIF entry in instance network info cache for port bb164768-c900-42bd-819e-eb523bfc2d54. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 849.303922] env[69992]: DEBUG nova.network.neutron [req-77d56ede-05b1-4c27-9717-956ee723b673 req-0685f57d-b40d-463a-bb0a-1320b46268fb service nova] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Updating instance_info_cache with network_info: [{"id": "bb164768-c900-42bd-819e-eb523bfc2d54", "address": "fa:16:3e:8d:66:f4", "network": {"id": "023d028d-4d61-4461-91bb-ebf0da99219b", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-413392921-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23727c76d64d4449820b8f861230275e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62d6a386-ffdb-4232-83f3-cb21c5e59e85", "external-id": "nsx-vlan-transportzone-950", "segmentation_id": 950, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb164768-c9", "ovs_interfaceid": "bb164768-c900-42bd-819e-eb523bfc2d54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.315677] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896554, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.02426} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.316972] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 849.317072] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Moving file from [datastore2] vmware_temp/c6a81a14-e760-4b72-b3b3-5668f05e891e/eb50549f-9db8-4c15-a738-0e4b1e9e33fb to [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb. 
{{(pid=69992) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 849.317305] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-504d745d-19cc-440e-b665-97f3a3bf4b3a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.327431] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Waiting for the task: (returnval){ [ 849.327431] env[69992]: value = "task-2896555" [ 849.327431] env[69992]: _type = "Task" [ 849.327431] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.337775] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896555, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.364260] env[69992]: DEBUG nova.virt.hardware [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 849.364725] env[69992]: DEBUG nova.virt.hardware [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 849.364884] env[69992]: DEBUG nova.virt.hardware [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 849.365092] env[69992]: DEBUG nova.virt.hardware [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 849.365262] env[69992]: DEBUG nova.virt.hardware [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 849.365409] env[69992]: DEBUG nova.virt.hardware [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 849.365631] env[69992]: DEBUG nova.virt.hardware [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 849.365807] env[69992]: DEBUG nova.virt.hardware [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 849.365993] env[69992]: DEBUG nova.virt.hardware [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 849.366180] env[69992]: DEBUG nova.virt.hardware [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 849.366354] env[69992]: DEBUG nova.virt.hardware [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 849.368499] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa108b0-4ad7-494e-b3ad-d67215395a82 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.378653] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f09a45a6-6f07-469a-b817-0ec64e2162f3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.452833] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.377s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.452833] env[69992]: DEBUG nova.compute.manager [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 849.456080] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.315s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.456392] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.456938] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69992) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 849.463488] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.244s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.463488] env[69992]: INFO nova.compute.claims [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 849.468352] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f828f5ba-7743-452b-acfd-795e3678ba18 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.480983] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ef2296-72f2-4d86-8de3-7e2cb70b548b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.498589] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad6dddd-6d61-4df4-af9b-5fb4dfb43503 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.505898] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf9ff11-e1cb-405a-aa02-139d968d962a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.543938] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquiring lock "ee4c0f2b-44cb-4b37-8e4a-5706b9932144" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.543938] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 
tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "ee4c0f2b-44cb-4b37-8e4a-5706b9932144" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.545872] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181137MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=69992) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 849.545872] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.675525] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Releasing lock "refresh_cache-e74441fc-361f-4e0b-bfdd-6f8213db51e3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.675920] env[69992]: DEBUG nova.compute.manager [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Instance network_info: |[{"id": "0adb34fa-f52f-4b4d-983b-afa1a04f5624", "address": "fa:16:3e:b7:6e:e5", "network": {"id": "ad60b844-dad4-4331-94a2-e2218409c6ba", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-77980625-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "416f0d4ae65b473a98668257b289250e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0adb34fa-f5", "ovs_interfaceid": "0adb34fa-f52f-4b4d-983b-afa1a04f5624", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 849.676566] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:6e:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e66c4ebe-f808-4b34-bdb5-6c45edb1736f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'0adb34fa-f52f-4b4d-983b-afa1a04f5624', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 849.684434] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Creating folder: Project (416f0d4ae65b473a98668257b289250e). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 849.685050] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f95a9f7-582d-48a7-b2d6-4c38013d4d58 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.699326] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Created folder: Project (416f0d4ae65b473a98668257b289250e) in parent group-v581821. [ 849.699590] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Creating folder: Instances. Parent ref: group-v581831. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 849.699738] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0059691c-e245-4b61-a7c8-eac2641bb831 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.712021] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Created folder: Instances in parent group-v581831. [ 849.712021] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 849.712021] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 849.712021] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4fd3f1a0-822f-4804-b5d6-1216036e780b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.736712] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 849.736712] env[69992]: value = "task-2896558" [ 849.736712] env[69992]: _type = "Task" [ 849.736712] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.747436] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896558, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.809164] env[69992]: DEBUG oslo_concurrency.lockutils [req-77d56ede-05b1-4c27-9717-956ee723b673 req-0685f57d-b40d-463a-bb0a-1320b46268fb service nova] Releasing lock "refresh_cache-64ab568c-a2ef-4bac-8885-3dde76f9f764" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.809164] env[69992]: DEBUG nova.compute.manager [req-77d56ede-05b1-4c27-9717-956ee723b673 req-0685f57d-b40d-463a-bb0a-1320b46268fb service nova] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Received event network-vif-plugged-c01a5abb-0c56-4377-ab40-619062fc6092 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 849.809164] env[69992]: DEBUG oslo_concurrency.lockutils [req-77d56ede-05b1-4c27-9717-956ee723b673 req-0685f57d-b40d-463a-bb0a-1320b46268fb service nova] Acquiring lock "fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.809164] env[69992]: DEBUG oslo_concurrency.lockutils [req-77d56ede-05b1-4c27-9717-956ee723b673 req-0685f57d-b40d-463a-bb0a-1320b46268fb service nova] Lock "fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.809570] env[69992]: DEBUG oslo_concurrency.lockutils [req-77d56ede-05b1-4c27-9717-956ee723b673 req-0685f57d-b40d-463a-bb0a-1320b46268fb service nova] Lock "fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.809570] env[69992]: DEBUG nova.compute.manager [req-77d56ede-05b1-4c27-9717-956ee723b673 req-0685f57d-b40d-463a-bb0a-1320b46268fb service nova] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] No waiting events found dispatching network-vif-plugged-c01a5abb-0c56-4377-ab40-619062fc6092 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 849.809642] env[69992]: WARNING nova.compute.manager [req-77d56ede-05b1-4c27-9717-956ee723b673 req-0685f57d-b40d-463a-bb0a-1320b46268fb service nova] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Received unexpected event network-vif-plugged-c01a5abb-0c56-4377-ab40-619062fc6092 for instance with vm_state building and task_state spawning. [ 849.809745] env[69992]: DEBUG nova.compute.manager [req-77d56ede-05b1-4c27-9717-956ee723b673 req-0685f57d-b40d-463a-bb0a-1320b46268fb service nova] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Received event network-changed-c01a5abb-0c56-4377-ab40-619062fc6092 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 849.809890] env[69992]: DEBUG nova.compute.manager [req-77d56ede-05b1-4c27-9717-956ee723b673 req-0685f57d-b40d-463a-bb0a-1320b46268fb service nova] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Refreshing instance network info cache due to event network-changed-c01a5abb-0c56-4377-ab40-619062fc6092. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 849.810203] env[69992]: DEBUG oslo_concurrency.lockutils [req-77d56ede-05b1-4c27-9717-956ee723b673 req-0685f57d-b40d-463a-bb0a-1320b46268fb service nova] Acquiring lock "refresh_cache-fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.810266] env[69992]: DEBUG oslo_concurrency.lockutils [req-77d56ede-05b1-4c27-9717-956ee723b673 req-0685f57d-b40d-463a-bb0a-1320b46268fb service nova] Acquired lock "refresh_cache-fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 849.810381] env[69992]: DEBUG nova.network.neutron [req-77d56ede-05b1-4c27-9717-956ee723b673 req-0685f57d-b40d-463a-bb0a-1320b46268fb service nova] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Refreshing network info cache for port c01a5abb-0c56-4377-ab40-619062fc6092 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 849.841583] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896555, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.025785} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.841925] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] File moved {{(pid=69992) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 849.842103] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Cleaning up location [datastore2] vmware_temp/c6a81a14-e760-4b72-b3b3-5668f05e891e {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 849.843472] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Deleting the datastore file [datastore2] vmware_temp/c6a81a14-e760-4b72-b3b3-5668f05e891e {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 849.843472] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4b40824f-4090-4115-bd28-39db59420bef {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.850622] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Waiting for the task: (returnval){ [ 849.850622] env[69992]: value = "task-2896559" [ 849.850622] env[69992]: _type = "Task" [ 849.850622] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.859681] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896559, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.959530] env[69992]: DEBUG nova.compute.utils [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 849.962263] env[69992]: DEBUG nova.compute.manager [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 849.962263] env[69992]: DEBUG nova.network.neutron [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 850.004470] env[69992]: DEBUG nova.network.neutron [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Successfully created port: ebe4280c-0d36-4d08-8c4b-cba51c7f80e9 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 850.049285] env[69992]: DEBUG nova.compute.manager [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 850.079408] env[69992]: DEBUG nova.compute.manager [req-5eb37aa8-c645-45b7-87b3-f54a847718f1 req-65d2809c-b338-41c6-99d2-0a59f16db7b4 service nova] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Received event network-vif-plugged-0adb34fa-f52f-4b4d-983b-afa1a04f5624 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 850.079529] env[69992]: DEBUG oslo_concurrency.lockutils [req-5eb37aa8-c645-45b7-87b3-f54a847718f1 req-65d2809c-b338-41c6-99d2-0a59f16db7b4 service nova] Acquiring lock "e74441fc-361f-4e0b-bfdd-6f8213db51e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.083115] env[69992]: DEBUG oslo_concurrency.lockutils [req-5eb37aa8-c645-45b7-87b3-f54a847718f1 req-65d2809c-b338-41c6-99d2-0a59f16db7b4 service nova] Lock "e74441fc-361f-4e0b-bfdd-6f8213db51e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 850.083411] env[69992]: DEBUG oslo_concurrency.lockutils [req-5eb37aa8-c645-45b7-87b3-f54a847718f1 req-65d2809c-b338-41c6-99d2-0a59f16db7b4 service nova] Lock "e74441fc-361f-4e0b-bfdd-6f8213db51e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.004s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 850.083608] env[69992]: DEBUG nova.compute.manager [req-5eb37aa8-c645-45b7-87b3-f54a847718f1 req-65d2809c-b338-41c6-99d2-0a59f16db7b4 service nova] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] No waiting events found dispatching network-vif-plugged-0adb34fa-f52f-4b4d-983b-afa1a04f5624 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 850.083805] env[69992]: WARNING nova.compute.manager [req-5eb37aa8-c645-45b7-87b3-f54a847718f1 req-65d2809c-b338-41c6-99d2-0a59f16db7b4 service nova] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Received unexpected event network-vif-plugged-0adb34fa-f52f-4b4d-983b-afa1a04f5624 for instance with vm_state building and task_state spawning. [ 850.083918] env[69992]: DEBUG nova.compute.manager [req-5eb37aa8-c645-45b7-87b3-f54a847718f1 req-65d2809c-b338-41c6-99d2-0a59f16db7b4 service nova] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Received event network-changed-0adb34fa-f52f-4b4d-983b-afa1a04f5624 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 850.084083] env[69992]: DEBUG nova.compute.manager [req-5eb37aa8-c645-45b7-87b3-f54a847718f1 req-65d2809c-b338-41c6-99d2-0a59f16db7b4 service nova] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Refreshing instance network info cache due to event network-changed-0adb34fa-f52f-4b4d-983b-afa1a04f5624. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 850.084283] env[69992]: DEBUG oslo_concurrency.lockutils [req-5eb37aa8-c645-45b7-87b3-f54a847718f1 req-65d2809c-b338-41c6-99d2-0a59f16db7b4 service nova] Acquiring lock "refresh_cache-e74441fc-361f-4e0b-bfdd-6f8213db51e3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.084418] env[69992]: DEBUG oslo_concurrency.lockutils [req-5eb37aa8-c645-45b7-87b3-f54a847718f1 req-65d2809c-b338-41c6-99d2-0a59f16db7b4 service nova] Acquired lock "refresh_cache-e74441fc-361f-4e0b-bfdd-6f8213db51e3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.084976] env[69992]: DEBUG nova.network.neutron [req-5eb37aa8-c645-45b7-87b3-f54a847718f1 req-65d2809c-b338-41c6-99d2-0a59f16db7b4 service nova] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Refreshing network info cache for port 0adb34fa-f52f-4b4d-983b-afa1a04f5624 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 850.249737] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896558, 'name': CreateVM_Task, 'duration_secs': 0.360683} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.250020] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 850.250893] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.251310] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.251773] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 850.252072] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6772431-611a-489c-964b-5ce871b98542 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.257919] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Waiting for the task: (returnval){ [ 850.257919] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d3e665-756a-6243-2107-95d4a26850f0" [ 850.257919] env[69992]: _type = "Task" [ 850.257919] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.266826] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d3e665-756a-6243-2107-95d4a26850f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.346824] env[69992]: DEBUG nova.policy [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd61a94a595764844b2793095d3ed1c0b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a948803e8594dd593233e2b55cf0925', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 850.364911] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896559, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.027168} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.365297] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 850.366129] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38ba5e2b-cc0f-4683-83c8-58e3b4f02ce7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.374978] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Waiting for the task: (returnval){ [ 850.374978] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5220111f-d19d-6c3b-f833-f5a2d331c6b8" [ 850.374978] env[69992]: _type = "Task" [ 850.374978] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.387461] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5220111f-d19d-6c3b-f833-f5a2d331c6b8, 'name': SearchDatastore_Task, 'duration_secs': 0.009127} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.387984] env[69992]: DEBUG oslo_concurrency.lockutils [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 850.388139] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 64ab568c-a2ef-4bac-8885-3dde76f9f764/64ab568c-a2ef-4bac-8885-3dde76f9f764.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 850.388658] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.388658] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 850.388887] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ca1e9fe2-b45c-4b23-a983-9d165ba388da {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.392879] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6cc0607-3270-47fb-bb8d-cec376594610 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.398746] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Waiting for the task: (returnval){ [ 850.398746] env[69992]: value = "task-2896560" [ 850.398746] env[69992]: _type = "Task" [ 850.398746] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.407324] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 850.407324] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 850.407729] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c8b32ba-aebe-4db6-a1ff-e409ed21e245 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.413171] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896560, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.417225] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Waiting for the task: (returnval){ [ 850.417225] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]524732c5-3758-fc6d-ca8a-92e6171a23ce" [ 850.417225] env[69992]: _type = "Task" [ 850.417225] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.425038] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524732c5-3758-fc6d-ca8a-92e6171a23ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.466173] env[69992]: DEBUG nova.compute.manager [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 850.592389] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.652482] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3709687c-37cf-4461-9925-ff2a52a2c410 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.668509] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef40979-be42-4b7e-901c-15d2c7437b9c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.717171] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bce4051-781c-4dc8-a8dd-8191a115ab8f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.727343] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0140d29-a0a3-43b7-bbd6-2bd5292210c6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.745351] env[69992]: DEBUG nova.compute.provider_tree [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.772876] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d3e665-756a-6243-2107-95d4a26850f0, 'name': SearchDatastore_Task, 'duration_secs': 0.008662} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.773175] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 850.773466] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 850.773686] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.913890] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896560, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476762} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.914389] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 64ab568c-a2ef-4bac-8885-3dde76f9f764/64ab568c-a2ef-4bac-8885-3dde76f9f764.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 850.914389] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 850.915034] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-93d76b2a-97f4-46a5-a515-044d6562263c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.929721] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524732c5-3758-fc6d-ca8a-92e6171a23ce, 'name': SearchDatastore_Task, 'duration_secs': 0.007746} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.932767] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Waiting for the task: (returnval){ [ 850.932767] env[69992]: value = "task-2896561" [ 850.932767] env[69992]: _type = "Task" [ 850.932767] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.932767] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ea48454-54ac-4780-ad32-8cc3afcd6e8e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.943807] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Waiting for the task: (returnval){ [ 850.943807] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52979337-0c55-2164-179f-1af2d9e96db6" [ 850.943807] env[69992]: _type = "Task" [ 850.943807] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.958236] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52979337-0c55-2164-179f-1af2d9e96db6, 'name': SearchDatastore_Task, 'duration_secs': 0.008787} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.958580] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 850.958858] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0/fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 850.959242] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.959439] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 850.960104] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-59ead2fa-91fd-4e17-a6ca-952c189a55f8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.963726] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-635f1a60-d5b7-4056-8319-17d30d939e77 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.977820] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Waiting for the task: (returnval){ [ 850.977820] env[69992]: value = "task-2896562" [ 850.977820] env[69992]: _type = "Task" [ 850.977820] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.987371] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 850.987683] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 850.994262] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ba397a1-a483-450c-9ab7-67b6aa09148a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.005786] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Waiting for the task: (returnval){ [ 851.005786] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52e27868-cbba-4aaa-6b99-fb1329d8fcf3" [ 851.005786] env[69992]: _type = "Task" [ 851.005786] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.006078] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Task: {'id': task-2896562, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.018271] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e27868-cbba-4aaa-6b99-fb1329d8fcf3, 'name': SearchDatastore_Task, 'duration_secs': 0.009371} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.019088] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09d7d8af-ff1e-4c7c-8116-e4a4789f8137 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.030276] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Waiting for the task: (returnval){ [ 851.030276] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]525a8b86-542c-4b93-985a-75201047396b" [ 851.030276] env[69992]: _type = "Task" [ 851.030276] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.038934] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525a8b86-542c-4b93-985a-75201047396b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.251637] env[69992]: DEBUG nova.scheduler.client.report [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 851.448444] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896561, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087404} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.448444] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 851.448444] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95fab37e-afc0-4fd5-96f4-bba3183e5068 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.451947] env[69992]: DEBUG nova.network.neutron [req-77d56ede-05b1-4c27-9717-956ee723b673 req-0685f57d-b40d-463a-bb0a-1320b46268fb service nova] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Updated VIF entry in instance network info cache for port c01a5abb-0c56-4377-ab40-619062fc6092. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 851.452283] env[69992]: DEBUG nova.network.neutron [req-77d56ede-05b1-4c27-9717-956ee723b673 req-0685f57d-b40d-463a-bb0a-1320b46268fb service nova] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Updating instance_info_cache with network_info: [{"id": "c01a5abb-0c56-4377-ab40-619062fc6092", "address": "fa:16:3e:bb:32:3c", "network": {"id": "1f29edc7-882e-4623-86e7-3d4c6cd47bdd", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1063426455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fbe8c17ad0154c55a053d464c46a4857", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92233552-2c0c-416e-9bf3-bfcca8eda2dc", "external-id": "nsx-vlan-transportzone-251", "segmentation_id": 251, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc01a5abb-0c", "ovs_interfaceid": "c01a5abb-0c56-4377-ab40-619062fc6092", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.479804] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] 64ab568c-a2ef-4bac-8885-3dde76f9f764/64ab568c-a2ef-4bac-8885-3dde76f9f764.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 851.480897] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d77a3e30-ca53-4fd4-adac-66bacc881615 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.497981] env[69992]: DEBUG nova.compute.manager [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 851.510937] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Waiting for the task: (returnval){ [ 851.510937] env[69992]: value = "task-2896563" [ 851.510937] env[69992]: _type = "Task" [ 851.510937] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.515385] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Task: {'id': task-2896562, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.463192} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.521164] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0/fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 851.521164] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 851.522043] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6b01d659-fe14-47fc-b271-9f62fc17de23 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.529223] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896563, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.534023] env[69992]: DEBUG nova.virt.hardware [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 851.534023] env[69992]: DEBUG nova.virt.hardware [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 851.534023] env[69992]: DEBUG nova.virt.hardware [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 851.534523] env[69992]: DEBUG nova.virt.hardware [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 851.534523] env[69992]: DEBUG nova.virt.hardware [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 851.534523] env[69992]: DEBUG nova.virt.hardware [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 851.534523] env[69992]: DEBUG nova.virt.hardware [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 851.534523] env[69992]: DEBUG nova.virt.hardware [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 851.534665] env[69992]: DEBUG nova.virt.hardware [None 
req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 851.534665] env[69992]: DEBUG nova.virt.hardware [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 851.534665] env[69992]: DEBUG nova.virt.hardware [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 851.534665] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Waiting for the task: (returnval){ [ 851.534665] env[69992]: value = "task-2896564" [ 851.534665] env[69992]: _type = "Task" [ 851.534665] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.535280] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eed6c76-4401-4946-963a-1109c2a6f403 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.555515] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d915f0-04b2-422b-b292-0e969ff886cf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.559432] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525a8b86-542c-4b93-985a-75201047396b, 'name': SearchDatastore_Task, 'duration_secs': 0.008482} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.564493] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 851.564493] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] f249c0b9-ddd7-4b63-ae3a-11035764d3e5/f249c0b9-ddd7-4b63-ae3a-11035764d3e5.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 851.565190] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 851.565190] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 851.565300] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-249d096b-581a-4aa8-8a4f-21274e30d4e0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.578310] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4764e670-fe6e-45da-9055-f868c7990cfa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.580774] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Task: {'id': task-2896564, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.593142] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Waiting for the task: (returnval){ [ 851.593142] env[69992]: value = "task-2896565" [ 851.593142] env[69992]: _type = "Task" [ 851.593142] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.593142] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 851.593142] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 851.593142] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b59a2627-be6f-496b-9ccf-a332148d21a4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.599439] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Waiting for the task: (returnval){ [ 851.599439] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52ab6693-be0c-552a-97cf-6392cddd8734" [ 851.599439] env[69992]: _type = "Task" [ 851.599439] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.604734] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Task: {'id': task-2896565, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.620018] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ab6693-be0c-552a-97cf-6392cddd8734, 'name': SearchDatastore_Task, 'duration_secs': 0.008724} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.620018] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3999a5b4-5ced-4a18-99ea-a5c5d6aac20e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.628493] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Waiting for the task: (returnval){ [ 851.628493] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52e56395-b93a-ba46-6c46-016f7786896f" [ 851.628493] env[69992]: _type = "Task" [ 851.628493] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.644644] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e56395-b93a-ba46-6c46-016f7786896f, 'name': SearchDatastore_Task, 'duration_secs': 0.008923} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.645428] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 851.647248] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] e74441fc-361f-4e0b-bfdd-6f8213db51e3/e74441fc-361f-4e0b-bfdd-6f8213db51e3.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 851.649841] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d3a08d4-ac12-471c-8874-28872ba45e13 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.662087] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Waiting for the task: (returnval){ [ 851.662087] env[69992]: value = "task-2896566" [ 851.662087] env[69992]: _type = "Task" [ 851.662087] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.681767] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Task: {'id': task-2896566, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.732284] env[69992]: DEBUG nova.network.neutron [req-5eb37aa8-c645-45b7-87b3-f54a847718f1 req-65d2809c-b338-41c6-99d2-0a59f16db7b4 service nova] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Updated VIF entry in instance network info cache for port 0adb34fa-f52f-4b4d-983b-afa1a04f5624. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 851.732284] env[69992]: DEBUG nova.network.neutron [req-5eb37aa8-c645-45b7-87b3-f54a847718f1 req-65d2809c-b338-41c6-99d2-0a59f16db7b4 service nova] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Updating instance_info_cache with network_info: [{"id": "0adb34fa-f52f-4b4d-983b-afa1a04f5624", "address": "fa:16:3e:b7:6e:e5", "network": {"id": "ad60b844-dad4-4331-94a2-e2218409c6ba", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-77980625-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "416f0d4ae65b473a98668257b289250e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0adb34fa-f5", "ovs_interfaceid": "0adb34fa-f52f-4b4d-983b-afa1a04f5624", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.764142] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.306s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.766293] env[69992]: DEBUG nova.compute.manager [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 851.772997] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.223s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.956587] env[69992]: DEBUG oslo_concurrency.lockutils [req-77d56ede-05b1-4c27-9717-956ee723b673 req-0685f57d-b40d-463a-bb0a-1320b46268fb service nova] Releasing lock "refresh_cache-fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 852.031097] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896563, 'name': ReconfigVM_Task, 'duration_secs': 0.312032} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.031188] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Reconfigured VM instance instance-00000001 to attach disk [datastore2] 64ab568c-a2ef-4bac-8885-3dde76f9f764/64ab568c-a2ef-4bac-8885-3dde76f9f764.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 852.033235] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-77e79d97-c90a-4f15-9aac-ec5fe02036c8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.048352] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Waiting for the task: (returnval){ [ 852.048352] env[69992]: value = "task-2896567" [ 852.048352] env[69992]: _type = "Task" [ 852.048352] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.060887] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Task: {'id': task-2896564, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065324} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.061601] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 852.062797] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a7f4a9-4e77-44e3-9668-c5218adc7c8e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.071044] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896567, 'name': Rename_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.095575] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0/fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 852.096996] env[69992]: DEBUG nova.network.neutron [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Successfully created port: fc767b62-dfd6-429e-84f0-140bda053ff7 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 852.099500] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60bc8519-3ea8-4911-8137-05620fc61b8c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.131586] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Task: {'id': task-2896565, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.461169} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.133261] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] f249c0b9-ddd7-4b63-ae3a-11035764d3e5/f249c0b9-ddd7-4b63-ae3a-11035764d3e5.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 852.133496] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 852.133922] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Waiting for the task: (returnval){ [ 852.133922] env[69992]: value = "task-2896568" [ 852.133922] env[69992]: _type = "Task" [ 852.133922] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.136323] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9b6001fa-1d07-454e-b78a-6d5c1c6dfb76 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.149258] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Task: {'id': task-2896568, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.151657] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Waiting for the task: (returnval){ [ 852.151657] env[69992]: value = "task-2896569" [ 852.151657] env[69992]: _type = "Task" [ 852.151657] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.165861] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Task: {'id': task-2896569, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.185511] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Task: {'id': task-2896566, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.239558] env[69992]: DEBUG oslo_concurrency.lockutils [req-5eb37aa8-c645-45b7-87b3-f54a847718f1 req-65d2809c-b338-41c6-99d2-0a59f16db7b4 service nova] Releasing lock "refresh_cache-e74441fc-361f-4e0b-bfdd-6f8213db51e3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 852.277549] env[69992]: DEBUG nova.compute.utils [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 852.281723] env[69992]: DEBUG nova.compute.manager [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 852.285568] env[69992]: DEBUG nova.network.neutron [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 852.515402] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "1d436762-964d-40d9-871e-ee33c3ba25b5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.515725] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "1d436762-964d-40d9-871e-ee33c3ba25b5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.566478] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896567, 'name': Rename_Task, 'duration_secs': 0.310129} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.567181] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 852.567554] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f8cb06df-fec4-4047-ba1d-cdda98604aae {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.574812] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Waiting for the task: (returnval){ [ 852.574812] env[69992]: value = "task-2896570" [ 852.574812] env[69992]: _type = "Task" [ 852.574812] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.592144] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896570, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.645786] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Task: {'id': task-2896568, 'name': ReconfigVM_Task, 'duration_secs': 0.464693} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.645786] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Reconfigured VM instance instance-00000002 to attach disk [datastore2] fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0/fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 852.651023] env[69992]: DEBUG nova.policy [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a80bc64c6d0f48c0b00793cb595cfbbd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f02818bc6a9c48b0810968803c8be436', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 852.652887] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3ba7b349-ab94-4bdb-b0a9-1b2503ecce51 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.665193] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Task: {'id': task-2896569, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08885} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.670339] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 852.670339] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Waiting for the task: (returnval){ [ 852.670339] env[69992]: value = "task-2896571" [ 852.670339] env[69992]: _type = "Task" [ 852.670339] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.670962] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeed34dd-387d-4b59-9cd6-392727e2421c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.680854] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Task: {'id': task-2896566, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.74768} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.681675] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] e74441fc-361f-4e0b-bfdd-6f8213db51e3/e74441fc-361f-4e0b-bfdd-6f8213db51e3.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 852.681930] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 852.689524] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-84b2c1b7-b888-4d02-8c1f-3ab35f7f1de5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.699717] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] f249c0b9-ddd7-4b63-ae3a-11035764d3e5/f249c0b9-ddd7-4b63-ae3a-11035764d3e5.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 852.703355] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0709b81-35b7-448e-98df-956dcba89f6a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.718543] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Task: {'id': task-2896571, 'name': Rename_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.723924] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Waiting for the task: (returnval){ [ 852.723924] env[69992]: value = "task-2896572" [ 852.723924] env[69992]: _type = "Task" [ 852.723924] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.729818] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Waiting for the task: (returnval){ [ 852.729818] env[69992]: value = "task-2896573" [ 852.729818] env[69992]: _type = "Task" [ 852.729818] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.738564] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Task: {'id': task-2896572, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.742821] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Task: {'id': task-2896573, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.791858] env[69992]: DEBUG nova.compute.manager [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 852.833314] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 64ab568c-a2ef-4bac-8885-3dde76f9f764 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 852.833469] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 852.833588] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance e74441fc-361f-4e0b-bfdd-6f8213db51e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 852.835021] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance f249c0b9-ddd7-4b63-ae3a-11035764d3e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 852.835021] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 93b78a8b-389c-4114-8c1d-da80146d80f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 852.835021] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance e934fc79-f7c5-4ca9-9f81-85467c1e9b45 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 852.835021] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 068507bb-ee7a-44f7-b315-7d4b2b70e735 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.019922] env[69992]: DEBUG nova.compute.manager [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 853.085881] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896570, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.182744] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Task: {'id': task-2896571, 'name': Rename_Task, 'duration_secs': 0.187012} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.183052] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 853.183296] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0e2d684a-1a17-493d-82c1-9739dadc470c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.189432] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Waiting for the task: (returnval){ [ 853.189432] env[69992]: value = "task-2896574" [ 853.189432] env[69992]: _type = "Task" [ 853.189432] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.197582] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Task: {'id': task-2896574, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.244582] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Task: {'id': task-2896572, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078661} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.246528] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 853.247023] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Task: {'id': task-2896573, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.248274] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258a2d05-7316-4053-af32-7d1a7f115c4e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.274577] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] e74441fc-361f-4e0b-bfdd-6f8213db51e3/e74441fc-361f-4e0b-bfdd-6f8213db51e3.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 853.274978] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ed17ce4-c69b-46fb-9b08-d4b9f4e3fd89 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.309080] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Waiting for the task: (returnval){ [ 853.309080] env[69992]: value = "task-2896575" [ 853.309080] env[69992]: _type = "Task" [ 853.309080] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.318413] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Task: {'id': task-2896575, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.337229] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance ee4c0f2b-44cb-4b37-8e4a-5706b9932144 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 853.555340] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 853.562338] env[69992]: DEBUG oslo_concurrency.lockutils [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "e5d9de80-1ee5-462a-8459-168fd60e1972" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 853.562553] env[69992]: DEBUG oslo_concurrency.lockutils [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "e5d9de80-1ee5-462a-8459-168fd60e1972" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 853.585245] env[69992]: DEBUG oslo_vmware.api [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896570, 'name': PowerOnVM_Task, 'duration_secs': 0.511008} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.585561] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 853.585993] env[69992]: INFO nova.compute.manager [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Took 15.07 seconds to spawn the instance on the hypervisor. 
[ 853.586345] env[69992]: DEBUG nova.compute.manager [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 853.587171] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e387ca8-0572-4daf-b89e-ab0d92e37586 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.620163] env[69992]: DEBUG nova.network.neutron [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Successfully updated port: ebe4280c-0d36-4d08-8c4b-cba51c7f80e9 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 853.707551] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Task: {'id': task-2896574, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.746185] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Task: {'id': task-2896573, 'name': ReconfigVM_Task, 'duration_secs': 0.591436} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.747021] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Reconfigured VM instance instance-00000004 to attach disk [datastore2] f249c0b9-ddd7-4b63-ae3a-11035764d3e5/f249c0b9-ddd7-4b63-ae3a-11035764d3e5.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 853.747164] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0723fe66-c1e2-4544-9f10-9b95bbdabee4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.757015] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Waiting for the task: (returnval){ [ 853.757015] env[69992]: value = "task-2896576" [ 853.757015] env[69992]: _type = "Task" [ 853.757015] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.765926] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Task: {'id': task-2896576, 'name': Rename_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.811998] env[69992]: DEBUG nova.compute.manager [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 853.831161] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Task: {'id': task-2896575, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.844321] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 1d436762-964d-40d9-871e-ee33c3ba25b5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 853.844321] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 853.844321] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 853.857117] env[69992]: DEBUG nova.virt.hardware [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 853.858570] env[69992]: DEBUG nova.virt.hardware [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 853.858570] env[69992]: DEBUG nova.virt.hardware [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 
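The resource tracker records above total the usable vCPUs and allocated resources and then log a "Final resource view". A simplified sketch of how such a summary could be derived from per-instance flavors (the field names are illustrative, not Nova's actual objects):

```python
from dataclasses import dataclass

@dataclass
class InstanceUsage:
    vcpus: int
    memory_mb: int
    root_gb: int

def final_resource_view(host_vcpus, host_ram_mb, host_disk_gb, instances, reserved_ram_mb=512):
    """Aggregate per-instance usage into a host-level summary, as logged above."""
    used_vcpus = sum(i.vcpus for i in instances)
    used_ram = reserved_ram_mb + sum(i.memory_mb for i in instances)
    used_disk = sum(i.root_gb for i in instances)
    return {
        "phys_ram": host_ram_mb, "used_ram": used_ram,
        "phys_disk": host_disk_gb, "used_disk": used_disk,
        "total_vcpus": host_vcpus, "used_vcpus": used_vcpus,
    }

# Seven m1.nano-sized instances (1 vCPU / 192 MB / 1 GB) plus the 512 MB reserve
# reproduce the used_ram=1856MB, used_vcpus=7, used_disk=7GB figures in the log.
print(final_resource_view(48, 196590, 200, [InstanceUsage(1, 192, 1)] * 7))
```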
tempest-ServerPasswordTestJSON-1823579092-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 853.858640] env[69992]: DEBUG nova.virt.hardware [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 853.858796] env[69992]: DEBUG nova.virt.hardware [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 853.858975] env[69992]: DEBUG nova.virt.hardware [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 853.860876] env[69992]: DEBUG nova.virt.hardware [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 853.860876] env[69992]: DEBUG nova.virt.hardware [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 853.860876] env[69992]: DEBUG nova.virt.hardware [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 853.860876] env[69992]: DEBUG nova.virt.hardware [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 853.860876] env[69992]: DEBUG nova.virt.hardware [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 853.861268] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ac59e3-b57d-4779-9bbd-57f84aca6116 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.873079] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d6ec2de-64c9-4dca-a0c6-2eca552191dd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.066749] env[69992]: DEBUG nova.compute.manager [None req-073a033e-9e47-4182-96b2-ac75897b679d 
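The hardware.py records above walk through topology selection for a 1-vCPU flavor: limits of 65536 sockets/cores/threads and a single possible topology of (1, 1, 1). A rough sketch of enumerating valid (sockets, cores, threads) factorizations under such limits; this is a simplification of what Nova does, not its actual code:

```python
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate topologies whose product equals the vCPU count, within the limits."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                found.append(VirtCPUTopology(sockets, cores, threads))
    return found

# For 1 vCPU this yields the single topology the log reports: (1, 1, 1).
print(possible_cpu_topologies(1))
```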
tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 854.115850] env[69992]: INFO nova.compute.manager [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Took 19.86 seconds to build instance. [ 854.126259] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Acquiring lock "refresh_cache-93b78a8b-389c-4114-8c1d-da80146d80f3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.126774] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Acquired lock "refresh_cache-93b78a8b-389c-4114-8c1d-da80146d80f3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.126774] env[69992]: DEBUG nova.network.neutron [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 854.195153] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b2757f-256b-484c-9383-77867fdc9882 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.212046] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-011cd37c-a876-4811-aff7-4b4771045e1d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.216626] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Task: {'id': task-2896574, 'name': PowerOnVM_Task} progress is 71%. 
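The lockutils records above acquire `refresh_cache-<uuid>` before rebuilding that instance's network info cache, so only one request refreshes a given instance at a time. A simplified per-key lock sketch (threading-based, standing in for oslo.concurrency; `fetch_nw_info` is a hypothetical placeholder for the Neutron query):

```python
import threading
from collections import defaultdict
from contextlib import contextmanager

_cache_locks = defaultdict(threading.Lock)
_nw_info_cache = {}

@contextmanager
def refresh_cache_lock(instance_uuid):
    lock = _cache_locks[f"refresh_cache-{instance_uuid}"]
    lock.acquire()                     # "Acquiring lock" / "Acquired lock" in the log
    try:
        yield
    finally:
        lock.release()                 # "Releasing lock"

def refresh_network_cache(instance_uuid, fetch_nw_info):
    """Rebuild one instance's network info cache under its refresh_cache lock."""
    with refresh_cache_lock(instance_uuid):
        _nw_info_cache[instance_uuid] = fetch_nw_info(instance_uuid)
    return _nw_info_cache[instance_uuid]

print(refresh_network_cache("93b78a8b", lambda uuid: [{"id": "ebe4280c", "active": True}]))
```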
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.252634] env[69992]: DEBUG nova.network.neutron [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Successfully created port: 0f5065fc-9e45-41f9-a922-76f438876fea {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 854.257041] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3acbfa90-e8cf-4ffc-97a5-e38b708ae180 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.270968] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ca4005-19c0-4f0d-9727-59e9bcad78bf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.274910] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Task: {'id': task-2896576, 'name': Rename_Task, 'duration_secs': 0.143821} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.275194] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 854.275835] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d2107b47-84c3-4f5c-9033-92ef2b98df72 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.286467] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.297900] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Waiting for the task: (returnval){ [ 854.297900] env[69992]: value = "task-2896577" [ 854.297900] env[69992]: _type = "Task" [ 854.297900] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.309114] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Task: {'id': task-2896577, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.325224] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Task: {'id': task-2896575, 'name': ReconfigVM_Task, 'duration_secs': 0.885898} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.325999] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Reconfigured VM instance instance-00000003 to attach disk [datastore2] e74441fc-361f-4e0b-bfdd-6f8213db51e3/e74441fc-361f-4e0b-bfdd-6f8213db51e3.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 854.326526] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9030b2f5-8ae8-4c82-9a88-31f39a4df9eb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.334395] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Waiting for the task: (returnval){ [ 854.334395] env[69992]: value = "task-2896578" [ 854.334395] env[69992]: _type = "Task" [ 854.334395] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.345456] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Task: {'id': task-2896578, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.596943] env[69992]: DEBUG oslo_concurrency.lockutils [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.622565] env[69992]: DEBUG oslo_concurrency.lockutils [None req-768875a9-a531-4b9f-8310-bf13bceaf4f5 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Lock "64ab568c-a2ef-4bac-8885-3dde76f9f764" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.381s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.704432] env[69992]: DEBUG oslo_vmware.api [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Task: {'id': task-2896574, 'name': PowerOnVM_Task, 'duration_secs': 1.357038} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.704810] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 854.706194] env[69992]: INFO nova.compute.manager [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Took 13.29 seconds to spawn the instance on the hypervisor. [ 854.706194] env[69992]: DEBUG nova.compute.manager [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 854.706194] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-498d4e1a-3458-4a6e-9afc-ca7567920a8a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.793102] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 854.811896] env[69992]: DEBUG oslo_vmware.api [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Task: {'id': task-2896577, 'name': PowerOnVM_Task, 'duration_secs': 0.462173} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.813467] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 854.813467] env[69992]: INFO nova.compute.manager [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Took 7.99 seconds to spawn the instance on the hypervisor. 
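Taken together, the records for these instances trace the per-instance spawn pipeline the driver runs: extend the root disk, reconfigure the VM to attach the VMDK, rename it, then power it on, polling each vCenter task in turn. A condensed, illustrative sketch of that orchestration; `run_task` is a hypothetical helper standing in for submitting a task and waiting on it:

```python
def spawn_on_hypervisor(vm, run_task):
    """Run the spawn steps observed in the log, in order."""
    run_task("ExtendVirtualDisk_Task", disk=vm["root_disk"], size_gb=vm["root_gb"])
    run_task("ReconfigVM_Task", vm=vm["name"], attach_disk=vm["root_disk"])
    run_task("Rename_Task", vm=vm["name"], new_name=vm["uuid"])
    run_task("PowerOnVM_Task", vm=vm["name"])

def fake_run_task(name, **kwargs):
    # A real driver would poll the task as in the wait_for_task sketch above.
    print(f"{name} completed successfully with {kwargs}")

spawn_on_hypervisor(
    {"name": "instance-00000003", "uuid": "example-uuid",
     "root_disk": "[datastore2] example.vmdk", "root_gb": 1},
    fake_run_task,
)
```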
[ 854.813467] env[69992]: DEBUG nova.compute.manager [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 854.814266] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4733e86-2f5e-4719-b6fe-d0b99973fa56 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.850042] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Task: {'id': task-2896578, 'name': Rename_Task, 'duration_secs': 0.206759} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.850042] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 854.850042] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1fe68493-883a-4cfa-93a6-8b908b4bb4ca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.857936] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Waiting for the task: (returnval){ [ 854.857936] env[69992]: value = "task-2896579" [ 854.857936] env[69992]: _type = "Task" [ 854.857936] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.868542] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Task: {'id': task-2896579, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.918413] env[69992]: DEBUG nova.network.neutron [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 855.232877] env[69992]: INFO nova.compute.manager [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Took 18.02 seconds to build instance. 
[ 855.301123] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69992) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 855.301123] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.531s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.301123] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.707s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.301123] env[69992]: INFO nova.compute.claims [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 855.344085] env[69992]: INFO nova.compute.manager [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Took 14.70 seconds to build instance. [ 855.372905] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Task: {'id': task-2896579, 'name': PowerOnVM_Task} progress is 64%. 
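The "Claim successful on node ..." record above is the point where the resource tracker, holding the `compute_resources` lock, verifies that the flavor fits the remaining capacity before the build proceeds. A toy version of that check (not Nova's actual claim logic; the dict shapes are assumptions):

```python
import threading

_compute_resources_lock = threading.Lock()

def instance_claim(host_free, flavor):
    """Claim vcpus/ram/disk for a flavor, failing if the host lacks capacity.

    host_free and flavor are dicts like {'vcpus': ..., 'memory_mb': ..., 'disk_gb': ...}.
    """
    with _compute_resources_lock:          # the log's Lock "compute_resources"
        for key in ("vcpus", "memory_mb", "disk_gb"):
            if host_free[key] < flavor[key]:
                raise RuntimeError(f"Insufficient {key}: need {flavor[key]}, have {host_free[key]}")
        for key in ("vcpus", "memory_mb", "disk_gb"):
            host_free[key] -= flavor[key]
    return dict(flavor)

free = {"vcpus": 41, "memory_mb": 194734, "disk_gb": 193}
print(instance_claim(free, {"vcpus": 1, "memory_mb": 192, "disk_gb": 1}), free)
```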
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.385765] env[69992]: DEBUG nova.compute.manager [req-1e63ffa5-3d77-4f4f-9b48-2f07cb372d2b req-9df8e7ea-8f3d-4dc8-9a43-f4089b8033bc service nova] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Received event network-vif-plugged-ebe4280c-0d36-4d08-8c4b-cba51c7f80e9 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 855.385985] env[69992]: DEBUG oslo_concurrency.lockutils [req-1e63ffa5-3d77-4f4f-9b48-2f07cb372d2b req-9df8e7ea-8f3d-4dc8-9a43-f4089b8033bc service nova] Acquiring lock "93b78a8b-389c-4114-8c1d-da80146d80f3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 855.386240] env[69992]: DEBUG oslo_concurrency.lockutils [req-1e63ffa5-3d77-4f4f-9b48-2f07cb372d2b req-9df8e7ea-8f3d-4dc8-9a43-f4089b8033bc service nova] Lock "93b78a8b-389c-4114-8c1d-da80146d80f3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.386404] env[69992]: DEBUG oslo_concurrency.lockutils [req-1e63ffa5-3d77-4f4f-9b48-2f07cb372d2b req-9df8e7ea-8f3d-4dc8-9a43-f4089b8033bc service nova] Lock "93b78a8b-389c-4114-8c1d-da80146d80f3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.386584] env[69992]: DEBUG nova.compute.manager [req-1e63ffa5-3d77-4f4f-9b48-2f07cb372d2b req-9df8e7ea-8f3d-4dc8-9a43-f4089b8033bc service nova] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] No waiting events found dispatching network-vif-plugged-ebe4280c-0d36-4d08-8c4b-cba51c7f80e9 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 855.386742] env[69992]: WARNING nova.compute.manager [req-1e63ffa5-3d77-4f4f-9b48-2f07cb372d2b req-9df8e7ea-8f3d-4dc8-9a43-f4089b8033bc service nova] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Received unexpected event network-vif-plugged-ebe4280c-0d36-4d08-8c4b-cba51c7f80e9 for instance with vm_state building and task_state spawning. 
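The `external_instance_event` / `pop_instance_event` records above show the compute manager receiving `network-vif-plugged-...` from Neutron, finding no registered waiter for it, and emitting the WARNING about an unexpected event. A small, threading-based sketch of that waiter-registry pattern (an illustrative analogue, not Nova's implementation):

```python
import threading
from collections import defaultdict

class InstanceEvents:
    """Match externally delivered events against callers waiting for them."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = defaultdict(dict)  # instance_uuid -> {event_name: Event}

    def prepare_for_event(self, instance_uuid, event_name):
        ev = threading.Event()
        with self._lock:                   # mirrors the "<uuid>-events" lock in the log
            self._waiters[instance_uuid][event_name] = ev
        return ev

    def pop_instance_event(self, instance_uuid, event_name):
        with self._lock:
            return self._waiters[instance_uuid].pop(event_name, None)

    def deliver(self, instance_uuid, event_name):
        waiter = self.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            print(f"WARNING: received unexpected event {event_name} for {instance_uuid}")
        else:
            waiter.set()

events = InstanceEvents()
events.deliver("93b78a8b", "network-vif-plugged-ebe4280c")  # no waiter -> warning, as logged
```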
[ 855.631400] env[69992]: DEBUG nova.network.neutron [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Updating instance_info_cache with network_info: [{"id": "ebe4280c-0d36-4d08-8c4b-cba51c7f80e9", "address": "fa:16:3e:ff:f0:a2", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.234", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebe4280c-0d", "ovs_interfaceid": "ebe4280c-0d36-4d08-8c4b-cba51c7f80e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.735530] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c41f1053-3e06-4ead-bcd7-1171d8b3e2aa tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Lock "fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.531s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.845537] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c503c3b-4930-4e2e-8df7-bdbec0fde4d1 tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Lock "f249c0b9-ddd7-4b63-ae3a-11035764d3e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.211s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.877096] env[69992]: DEBUG oslo_vmware.api [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Task: {'id': task-2896579, 'name': PowerOnVM_Task, 'duration_secs': 0.937315} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.879302] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 855.879302] env[69992]: INFO nova.compute.manager [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Took 11.30 seconds to spawn the instance on the hypervisor. [ 855.879302] env[69992]: DEBUG nova.compute.manager [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 855.880216] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c4cc1f1-bf98-4cf4-8b33-e0149118ad74 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.984701] env[69992]: DEBUG nova.network.neutron [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Successfully updated port: fc767b62-dfd6-429e-84f0-140bda053ff7 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 856.135241] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Releasing lock "refresh_cache-93b78a8b-389c-4114-8c1d-da80146d80f3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.135588] env[69992]: DEBUG nova.compute.manager [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Instance network_info: |[{"id": "ebe4280c-0d36-4d08-8c4b-cba51c7f80e9", "address": "fa:16:3e:ff:f0:a2", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.234", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebe4280c-0d", "ovs_interfaceid": "ebe4280c-0d36-4d08-8c4b-cba51c7f80e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 856.136068] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:f0:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '07e9bef1-2b0e-4e4d-997f-de71bb0e213a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ebe4280c-0d36-4d08-8c4b-cba51c7f80e9', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 856.151584] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Creating folder: Project (46a2d84b932449edb69966a5884990d9). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 856.151909] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d040743e-1019-4087-9079-79d4f8216298 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.168459] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Created folder: Project (46a2d84b932449edb69966a5884990d9) in parent group-v581821. [ 856.170026] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Creating folder: Instances. Parent ref: group-v581834. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 856.170026] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e7fd8ce-8400-4ef8-8ac2-6f671bc1d350 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.185970] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Created folder: Instances in parent group-v581834. [ 856.186414] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 856.186640] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 856.186846] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d4d78e9b-0dd8-40be-9cc1-a582aeaafdad {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.211754] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 856.211754] env[69992]: value = "task-2896582" [ 856.211754] env[69992]: _type = "Task" [ 856.211754] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.225770] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896582, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.405604] env[69992]: INFO nova.compute.manager [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Took 16.15 seconds to build instance. [ 856.489256] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Acquiring lock "refresh_cache-e934fc79-f7c5-4ca9-9f81-85467c1e9b45" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.489638] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Acquired lock "refresh_cache-e934fc79-f7c5-4ca9-9f81-85467c1e9b45" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 856.490083] env[69992]: DEBUG nova.network.neutron [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 856.600021] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1406a9f1-b265-409c-bf30-cf3247fe8279 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.609868] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004c90c9-a274-4a1c-ae4b-605e0599bcd3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.647137] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-888f0e9f-8d66-4722-8f9e-a7a8cdb9148e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.658846] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33747a1-daca-4b50-95de-1d2afcd0c7b3 {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.681755] env[69992]: DEBUG nova.compute.provider_tree [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 856.724822] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896582, 'name': CreateVM_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.908355] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08315d07-c2e8-42c2-b462-727309f0f6a2 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Lock "e74441fc-361f-4e0b-bfdd-6f8213db51e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.659s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 857.099303] env[69992]: DEBUG nova.network.neutron [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 857.185304] env[69992]: DEBUG nova.scheduler.client.report [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 857.226162] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896582, 'name': CreateVM_Task, 'duration_secs': 0.690229} completed successfully. 
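The "Inventory has not changed for provider ... based on inventory data" records show the report client comparing the locally computed inventory against what it last sent to placement and skipping the update when nothing differs. A minimal sketch of that comparison (illustrative structures only):

```python
def diff_inventory(cached, current):
    """Return the resource classes whose inventory differs from the cached copy."""
    changed = {}
    for rc in set(cached) | set(current):
        if cached.get(rc) != current.get(rc):
            changed[rc] = current.get(rc)
    return changed

cached = {"VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
          "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0}}
current = {"VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
           "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0}}

if not diff_inventory(cached, current):
    print("Inventory has not changed for provider; skipping update to placement.")
```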
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.226349] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 857.227994] env[69992]: DEBUG oslo_vmware.service [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb13c51-1946-45de-ba71-dc18ffb875b2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.235609] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.235609] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 857.236029] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 857.236294] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef2ebfdc-9c95-4cdc-8431-66c45381a9f7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.243676] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for the task: (returnval){ [ 857.243676] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52337a17-92ac-45d1-ccc5-ff1e4b9c29ba" [ 857.243676] env[69992]: _type = "Task" [ 857.243676] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.254654] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52337a17-92ac-45d1-ccc5-ff1e4b9c29ba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.691921] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.392s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 857.692498] env[69992]: DEBUG nova.compute.manager [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 857.699277] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.144s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 857.702025] env[69992]: INFO nova.compute.claims [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 857.732245] env[69992]: DEBUG nova.network.neutron [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Updating instance_info_cache with network_info: [{"id": "fc767b62-dfd6-429e-84f0-140bda053ff7", "address": "fa:16:3e:a4:a2:13", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc767b62-df", "ovs_interfaceid": "fc767b62-dfd6-429e-84f0-140bda053ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.758520] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 857.758712] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 857.762506] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.762506] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 857.762506] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 857.762506] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3f9aa80f-e5a3-4c34-9d26-cdd6486bdd1e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.774945] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 857.774945] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Folder [datastore1] devstack-image-cache_base created. 
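The ds_util/vmops records above create the `devstack-image-cache_base` directory on the datastore and then note that the folder was created, via a create-if-missing helper. The same idempotent pattern, sketched with the local filesystem standing in for the datastore:

```python
import os

def create_folder_if_missing(base_path, folder_name):
    """Create a cache folder, tolerating the case where it already exists."""
    path = os.path.join(base_path, folder_name)
    try:
        os.makedirs(path, exist_ok=False)
        print(f"Folder {path} created.")
    except FileExistsError:
        # Another worker won the race; the log treats this case as success too.
        print(f"Folder {path} already exists.")
    return path

create_folder_if_missing("/tmp", "devstack-image-cache_base")
```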
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 857.776198] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5136a7f-9a75-4d67-b726-3d5dc591377f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.788098] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-600a4485-0aae-49e2-9b34-24b3794fda1a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.795585] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for the task: (returnval){ [ 857.795585] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52aec634-6137-f19a-0305-c1236379fefb" [ 857.795585] env[69992]: _type = "Task" [ 857.795585] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.809089] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52aec634-6137-f19a-0305-c1236379fefb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.068556] env[69992]: DEBUG nova.network.neutron [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Successfully updated port: 0f5065fc-9e45-41f9-a922-76f438876fea {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 858.208621] env[69992]: DEBUG nova.compute.utils [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 858.216673] env[69992]: DEBUG nova.compute.manager [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 858.217027] env[69992]: DEBUG nova.network.neutron [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 858.237286] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Releasing lock "refresh_cache-e934fc79-f7c5-4ca9-9f81-85467c1e9b45" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 858.237286] env[69992]: DEBUG nova.compute.manager [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Instance network_info: |[{"id": "fc767b62-dfd6-429e-84f0-140bda053ff7", "address": "fa:16:3e:a4:a2:13", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc767b62-df", "ovs_interfaceid": "fc767b62-dfd6-429e-84f0-140bda053ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 858.237960] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:a2:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '07e9bef1-2b0e-4e4d-997f-de71bb0e213a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc767b62-dfd6-429e-84f0-140bda053ff7', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 858.249359] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Creating folder: Project (8a948803e8594dd593233e2b55cf0925). Parent ref: group-v581821. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 858.250972] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d01c88d3-5111-446d-817d-b52908f64fdb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.265634] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Created folder: Project (8a948803e8594dd593233e2b55cf0925) in parent group-v581821. [ 858.266035] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Creating folder: Instances. Parent ref: group-v581837. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 858.266289] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dc3073bf-9311-4e2c-ba0a-4941ee041d4c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.282214] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Created folder: Instances in parent group-v581837. [ 858.283432] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 858.283720] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 858.284790] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d248b105-d8cf-4fa3-8062-f737871ecce9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.316904] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Preparing fetch location {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 858.317506] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Creating directory with path [datastore1] vmware_temp/400c5030-4e91-4a35-942a-def235972afa/eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 858.317708] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 858.317708] env[69992]: value = "task-2896585" [ 858.317708] env[69992]: _type = "Task" [ 858.317708] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.317852] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f160348-f57d-4dc0-8337-f16bdc352d78 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.334398] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896585, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.344740] env[69992]: DEBUG oslo_concurrency.lockutils [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Acquiring lock "1d5722e1-5a48-4212-bbc7-527a3739db6e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.344941] env[69992]: DEBUG oslo_concurrency.lockutils [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Lock "1d5722e1-5a48-4212-bbc7-527a3739db6e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.351769] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Created directory with path [datastore1] vmware_temp/400c5030-4e91-4a35-942a-def235972afa/eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 858.352025] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Fetch image to [datastore1] vmware_temp/400c5030-4e91-4a35-942a-def235972afa/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/tmp-sparse.vmdk {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 858.352239] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Downloading image file data eb50549f-9db8-4c15-a738-0e4b1e9e33fb to [datastore1] vmware_temp/400c5030-4e91-4a35-942a-def235972afa/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/tmp-sparse.vmdk on the data store datastore1 {{(pid=69992) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 858.353602] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c67beb13-a5df-4444-82e3-25e0e03f63fc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.369261] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15af5e12-8378-4ab3-b80a-10d7ef6081e8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.389069] env[69992]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b200d226-e208-4b0e-aed0-10ef3b33f57f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.437781] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f66e99f-ea6c-4eff-aaa5-7b832336afe3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.447652] env[69992]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-89cb2267-0a92-4e17-ba36-5f47b6039386 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.472039] env[69992]: DEBUG nova.policy [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd65ae0068ef6437ab1af36384644513a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'da546e986828460e958e2eed165bf47e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 858.560466] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Downloading image file data eb50549f-9db8-4c15-a738-0e4b1e9e33fb to the data store datastore1 {{(pid=69992) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 858.571229] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Acquiring lock "refresh_cache-068507bb-ee7a-44f7-b315-7d4b2b70e735" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.571373] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Acquired lock "refresh_cache-068507bb-ee7a-44f7-b315-7d4b2b70e735" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 858.571528] env[69992]: DEBUG nova.network.neutron [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 858.639385] env[69992]: DEBUG oslo_vmware.rw_handles [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = 
https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/400c5030-4e91-4a35-942a-def235972afa/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69992) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 858.718987] env[69992]: DEBUG nova.compute.manager [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 858.832900] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896585, 'name': CreateVM_Task, 'duration_secs': 0.437258} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.833463] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 858.834165] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.834325] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 858.834626] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 858.834875] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bee4e85-9361-4cb1-9727-917593ca04f3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.842491] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Waiting for the task: (returnval){ [ 858.842491] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5245d09a-3f52-3811-a5f8-3ad4c0f6ebcb" [ 858.842491] env[69992]: _type = "Task" [ 858.842491] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.853399] env[69992]: DEBUG nova.compute.manager [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 858.860052] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5245d09a-3f52-3811-a5f8-3ad4c0f6ebcb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.959030] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f82167-7490-472f-8a49-8d420672a4b7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.972417] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f6f171e-0b12-443a-8f6a-7e3fd62db909 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.012370] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b40d0d0-4445-4d6f-9723-11b6c8c22c57 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.026415] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829f85d3-7354-439b-8840-ef8ee9f0015b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.040527] env[69992]: DEBUG oslo_concurrency.lockutils [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Acquiring lock "6c58c05e-9679-4e53-89e7-c7c9cb11cff0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.040775] env[69992]: DEBUG oslo_concurrency.lockutils [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Lock "6c58c05e-9679-4e53-89e7-c7c9cb11cff0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.051991] env[69992]: DEBUG nova.compute.provider_tree [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 859.083991] env[69992]: DEBUG nova.compute.manager [None req-2bf2c2f2-20aa-46df-aa47-ea6455dec147 tempest-ServerDiagnosticsV248Test-71468773 tempest-ServerDiagnosticsV248Test-71468773-project-admin] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 859.085561] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa802cd-248a-49fc-996a-b4899a09ff75 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
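The "Acquiring lock ... / Acquired lock ... / Releasing lock ..." entries (lockutils.py:313/316/334) and the "Lock ... acquired by ... :: waited / released by ... :: held" entries (lockutils.py:405/410/424) throughout this stretch are emitted by oslo.concurrency's lockutils helpers. Below is a minimal sketch of the two call styles that appear to produce them; the lock names, functions and bodies are illustrative placeholders, not Nova's actual code.

    # Sketch of the oslo.concurrency locking patterns visible in the log above.
    # Only the lockutils calls are real API; everything else is a placeholder.
    from oslo_concurrency import lockutils

    def claim_resources(instance_uuid):
        # Context-manager form: produces the "Acquiring lock ... / Acquired
        # lock ... / Releasing lock ..." DEBUG lines (lockutils.py:313/316/334).
        with lockutils.lock("compute_resources"):
            print("claiming resources for", instance_uuid)  # placeholder work

    def build_and_run(instance_uuid):
        # Decorator form: produces the "Lock ... acquired by ... :: waited Ns"
        # and "... released by ... :: held Ns" DEBUG lines from the
        # synchronized() wrapper (lockutils.py:405/410/424), one lock per
        # instance UUID.
        @lockutils.synchronized(instance_uuid)
        def _locked_build():
            print("building", instance_uuid)  # placeholder work
        _locked_build()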
[ 859.095101] env[69992]: INFO nova.compute.manager [None req-2bf2c2f2-20aa-46df-aa47-ea6455dec147 tempest-ServerDiagnosticsV248Test-71468773 tempest-ServerDiagnosticsV248Test-71468773-project-admin] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Retrieving diagnostics [ 859.095933] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e0fba12-cd25-4f38-b724-bf37ce2cd34f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.162506] env[69992]: DEBUG nova.network.neutron [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 859.358970] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 859.359253] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 859.359506] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.368369] env[69992]: DEBUG oslo_vmware.rw_handles [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Completed reading data from the image iterator. {{(pid=69992) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 859.368768] env[69992]: DEBUG oslo_vmware.rw_handles [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/400c5030-4e91-4a35-942a-def235972afa/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 859.391077] env[69992]: DEBUG oslo_concurrency.lockutils [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.442097] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Downloaded image file data eb50549f-9db8-4c15-a738-0e4b1e9e33fb to vmware_temp/400c5030-4e91-4a35-942a-def235972afa/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/tmp-sparse.vmdk on the data store datastore1 {{(pid=69992) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 859.445774] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Caching image {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 859.446297] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Copying Virtual Disk [datastore1] vmware_temp/400c5030-4e91-4a35-942a-def235972afa/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/tmp-sparse.vmdk to [datastore1] vmware_temp/400c5030-4e91-4a35-942a-def235972afa/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 859.446596] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1585758f-5004-42e4-aeb2-231777d923b7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.457502] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for the task: (returnval){ [ 859.457502] env[69992]: value = "task-2896586" [ 859.457502] env[69992]: _type = "Task" [ 859.457502] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.469821] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896586, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.555566] env[69992]: DEBUG nova.compute.manager [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 859.558422] env[69992]: DEBUG nova.scheduler.client.report [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 859.587145] env[69992]: DEBUG nova.network.neutron [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Updating instance_info_cache with network_info: [{"id": "0f5065fc-9e45-41f9-a922-76f438876fea", "address": "fa:16:3e:de:43:7f", "network": {"id": "78124c99-daa7-4df7-a2d7-c611435a8b0a", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-508185452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f02818bc6a9c48b0810968803c8be436", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f5065fc-9e", "ovs_interfaceid": "0f5065fc-9e45-41f9-a922-76f438876fea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.735556] env[69992]: DEBUG nova.compute.manager [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 859.770021] env[69992]: DEBUG nova.virt.hardware [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 859.770290] env[69992]: DEBUG nova.virt.hardware [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 859.771534] env[69992]: DEBUG nova.virt.hardware [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 859.771620] env[69992]: DEBUG nova.virt.hardware [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 859.772855] env[69992]: DEBUG nova.virt.hardware [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 859.772855] env[69992]: DEBUG nova.virt.hardware [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 859.773354] env[69992]: DEBUG nova.virt.hardware [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 859.775195] env[69992]: DEBUG nova.virt.hardware [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 859.775195] env[69992]: DEBUG nova.virt.hardware [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 859.775195] env[69992]: DEBUG nova.virt.hardware [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 859.775403] env[69992]: DEBUG nova.virt.hardware [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 859.776290] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da83a9d-aaf2-4365-ab16-6c70ee0b9cba {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.790820] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a1218f-0e7f-4e12-958c-478b052de6de {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.917475] env[69992]: DEBUG nova.network.neutron [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Successfully created port: 1f86db68-8a81-421c-aa9b-4daab0584c4c {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 859.974891] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896586, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.066918] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.368s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.067607] env[69992]: DEBUG nova.compute.manager [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 860.079022] env[69992]: DEBUG oslo_concurrency.lockutils [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.481s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.080139] env[69992]: INFO nova.compute.claims [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 860.091069] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Releasing lock "refresh_cache-068507bb-ee7a-44f7-b315-7d4b2b70e735" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 860.091450] env[69992]: DEBUG nova.compute.manager [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Instance network_info: |[{"id": "0f5065fc-9e45-41f9-a922-76f438876fea", "address": "fa:16:3e:de:43:7f", "network": {"id": "78124c99-daa7-4df7-a2d7-c611435a8b0a", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-508185452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f02818bc6a9c48b0810968803c8be436", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f5065fc-9e", "ovs_interfaceid": "0f5065fc-9e45-41f9-a922-76f438876fea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 860.092302] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:43:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f5065fc-9e45-41f9-a922-76f438876fea', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 860.106815] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 
tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Creating folder: Project (f02818bc6a9c48b0810968803c8be436). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 860.108265] env[69992]: DEBUG oslo_concurrency.lockutils [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 860.108667] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9b56e48c-24c7-4b3d-8787-cb7206d5a097 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.125331] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Created folder: Project (f02818bc6a9c48b0810968803c8be436) in parent group-v581821. [ 860.125331] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Creating folder: Instances. Parent ref: group-v581840. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 860.125331] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c75b75e5-82ea-4633-bba6-82ff09acdbcd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.143289] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Created folder: Instances in parent group-v581840. [ 860.144507] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 860.146177] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 860.146177] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8419725b-d509-43fa-87b7-1c58a6c07159 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.172730] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 860.172730] env[69992]: value = "task-2896589" [ 860.172730] env[69992]: _type = "Task" [ 860.172730] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.190261] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896589, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.476978] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896586, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.586371] env[69992]: DEBUG nova.compute.utils [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 860.591741] env[69992]: DEBUG nova.compute.manager [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 860.591741] env[69992]: DEBUG nova.network.neutron [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 860.686286] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896589, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.703528] env[69992]: DEBUG nova.policy [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1b2730128c5e487ea5d9b5b0ae9313ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '93d4e973e49e4cf98096fa30ded68db1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 860.878478] env[69992]: DEBUG nova.compute.manager [req-9332294a-13a5-4d2e-89b5-ba3a96be9bd4 req-6e56e475-74d8-47f1-a1d3-5e111964c41d service nova] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Received event network-changed-ebe4280c-0d36-4d08-8c4b-cba51c7f80e9 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 860.878478] env[69992]: DEBUG nova.compute.manager [req-9332294a-13a5-4d2e-89b5-ba3a96be9bd4 req-6e56e475-74d8-47f1-a1d3-5e111964c41d service nova] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Refreshing instance network info cache due to event network-changed-ebe4280c-0d36-4d08-8c4b-cba51c7f80e9. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 860.878478] env[69992]: DEBUG oslo_concurrency.lockutils [req-9332294a-13a5-4d2e-89b5-ba3a96be9bd4 req-6e56e475-74d8-47f1-a1d3-5e111964c41d service nova] Acquiring lock "refresh_cache-93b78a8b-389c-4114-8c1d-da80146d80f3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.878478] env[69992]: DEBUG oslo_concurrency.lockutils [req-9332294a-13a5-4d2e-89b5-ba3a96be9bd4 req-6e56e475-74d8-47f1-a1d3-5e111964c41d service nova] Acquired lock "refresh_cache-93b78a8b-389c-4114-8c1d-da80146d80f3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 860.878478] env[69992]: DEBUG nova.network.neutron [req-9332294a-13a5-4d2e-89b5-ba3a96be9bd4 req-6e56e475-74d8-47f1-a1d3-5e111964c41d service nova] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Refreshing network info cache for port ebe4280c-0d36-4d08-8c4b-cba51c7f80e9 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 860.928454] env[69992]: DEBUG nova.compute.manager [req-7c17e21e-257b-4344-91b7-01cf69d8ce54 req-0eb1386b-484f-4b31-a5b9-ca99b8212129 service nova] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Received event network-vif-plugged-0f5065fc-9e45-41f9-a922-76f438876fea {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 860.928971] env[69992]: DEBUG oslo_concurrency.lockutils [req-7c17e21e-257b-4344-91b7-01cf69d8ce54 req-0eb1386b-484f-4b31-a5b9-ca99b8212129 service nova] Acquiring lock "068507bb-ee7a-44f7-b315-7d4b2b70e735-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 860.929353] env[69992]: DEBUG oslo_concurrency.lockutils [req-7c17e21e-257b-4344-91b7-01cf69d8ce54 req-0eb1386b-484f-4b31-a5b9-ca99b8212129 service nova] Lock "068507bb-ee7a-44f7-b315-7d4b2b70e735-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.930713] env[69992]: DEBUG oslo_concurrency.lockutils [req-7c17e21e-257b-4344-91b7-01cf69d8ce54 req-0eb1386b-484f-4b31-a5b9-ca99b8212129 service nova] Lock "068507bb-ee7a-44f7-b315-7d4b2b70e735-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.930922] env[69992]: DEBUG nova.compute.manager [req-7c17e21e-257b-4344-91b7-01cf69d8ce54 req-0eb1386b-484f-4b31-a5b9-ca99b8212129 service nova] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] No waiting events found dispatching network-vif-plugged-0f5065fc-9e45-41f9-a922-76f438876fea {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 860.931106] env[69992]: WARNING nova.compute.manager [req-7c17e21e-257b-4344-91b7-01cf69d8ce54 req-0eb1386b-484f-4b31-a5b9-ca99b8212129 service nova] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Received unexpected event network-vif-plugged-0f5065fc-9e45-41f9-a922-76f438876fea for instance with vm_state building and task_state spawning. 
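The recurring "Waiting for the task: (returnval){ ... } to complete" and "Task: {...} progress is N%" entries come from oslo.vmware's task polling (wait_for_task/_poll_task in oslo_vmware/api.py), wrapped around vSphere *_Task calls such as the CreateVM_Task, SearchDatastore_Task and CopyVirtualDisk_Task invocations seen here. A minimal sketch of that calling pattern follows, assuming a placeholder vCenter endpoint and credentials and using PowerOnVM_Task purely as an illustrative task method.

    # Sketch of the oslo.vmware session/task pattern behind the
    # "Waiting for the task ..." and "progress is N%" log lines above.
    from oslo_vmware import api

    def make_session():
        # Positional arguments: host, username, password, API retry count,
        # task poll interval. Endpoint and credentials are placeholders.
        return api.VMwareAPISession("vc.example.test", "user", "secret", 10, 0.5)

    def power_on_and_wait(session, vm_ref):
        # vSphere methods ending in *_Task return a Task managed object;
        # PowerOnVM_Task stands in for the CreateVM_Task / CopyVirtualDisk_Task
        # calls seen in the log.
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
        # wait_for_task() polls the task, emitting the progress DEBUG lines,
        # and returns once the task reaches a terminal state.
        return session.wait_for_task(task)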
[ 860.973854] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896586, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.188274} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.974197] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Copied Virtual Disk [datastore1] vmware_temp/400c5030-4e91-4a35-942a-def235972afa/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/tmp-sparse.vmdk to [datastore1] vmware_temp/400c5030-4e91-4a35-942a-def235972afa/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 860.974497] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Deleting the datastore file [datastore1] vmware_temp/400c5030-4e91-4a35-942a-def235972afa/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/tmp-sparse.vmdk {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 860.974728] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6f8e55e-03bd-4bc6-8121-4ebfa73f7393 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.985382] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for the task: (returnval){ [ 860.985382] env[69992]: value = "task-2896590" [ 860.985382] env[69992]: _type = "Task" [ 860.985382] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.995699] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896590, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.091753] env[69992]: DEBUG nova.compute.manager [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 861.190050] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896589, 'name': CreateVM_Task, 'duration_secs': 0.61464} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.192595] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 861.194041] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.194041] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 861.194212] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 861.197017] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d57cd79-2815-4e14-aa07-97a2aa6ec8df {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.202688] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Waiting for the task: (returnval){ [ 861.202688] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52773003-4e44-4cb7-c3cb-0e98c4bab41e" [ 861.202688] env[69992]: _type = "Task" [ 861.202688] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.214577] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52773003-4e44-4cb7-c3cb-0e98c4bab41e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.348824] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8caaed-9a9d-403d-9d91-a67dcc683887 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.358118] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b65a2cea-aacc-4e57-8856-fba3220f9b07 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.397403] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92caefd7-ed0a-48e6-8b9d-d86a6d20b848 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.406560] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2315f8d7-7852-4c6b-8d30-6fcc1478fd28 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.423450] env[69992]: DEBUG nova.compute.provider_tree [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 861.466310] env[69992]: DEBUG nova.network.neutron [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Successfully created port: 2584dc71-913f-4c9b-922c-f8b28530b82f {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 861.496602] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896590, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.042562} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.496850] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 861.497066] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Moving file from [datastore1] vmware_temp/400c5030-4e91-4a35-942a-def235972afa/eb50549f-9db8-4c15-a738-0e4b1e9e33fb to [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb. 
{{(pid=69992) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 861.497313] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-196a0d81-b57d-4b32-9925-7104fe7549e7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.506074] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for the task: (returnval){ [ 861.506074] env[69992]: value = "task-2896591" [ 861.506074] env[69992]: _type = "Task" [ 861.506074] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.515736] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896591, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.715626] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 861.715770] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 861.716065] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.820886] env[69992]: DEBUG nova.network.neutron [req-9332294a-13a5-4d2e-89b5-ba3a96be9bd4 req-6e56e475-74d8-47f1-a1d3-5e111964c41d service nova] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Updated VIF entry in instance network info cache for port ebe4280c-0d36-4d08-8c4b-cba51c7f80e9. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 861.820886] env[69992]: DEBUG nova.network.neutron [req-9332294a-13a5-4d2e-89b5-ba3a96be9bd4 req-6e56e475-74d8-47f1-a1d3-5e111964c41d service nova] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Updating instance_info_cache with network_info: [{"id": "ebe4280c-0d36-4d08-8c4b-cba51c7f80e9", "address": "fa:16:3e:ff:f0:a2", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.234", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebe4280c-0d", "ovs_interfaceid": "ebe4280c-0d36-4d08-8c4b-cba51c7f80e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.927014] env[69992]: DEBUG nova.scheduler.client.report [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 862.023840] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896591, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.189687} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.023840] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] File moved {{(pid=69992) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 862.023840] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Cleaning up location [datastore1] vmware_temp/400c5030-4e91-4a35-942a-def235972afa {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 862.023840] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Deleting the datastore file [datastore1] vmware_temp/400c5030-4e91-4a35-942a-def235972afa {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 862.023840] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-daa89cd0-dab7-44cf-a95b-ad5b0c3a3f37 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.031704] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for the task: (returnval){ [ 862.031704] env[69992]: value = "task-2896592" [ 862.031704] env[69992]: _type = "Task" [ 862.031704] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.041633] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896592, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.109981] env[69992]: DEBUG nova.compute.manager [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 862.141942] env[69992]: DEBUG nova.virt.hardware [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 862.142282] env[69992]: DEBUG nova.virt.hardware [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 862.142470] env[69992]: DEBUG nova.virt.hardware [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 862.142595] env[69992]: DEBUG nova.virt.hardware [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 862.143295] env[69992]: DEBUG nova.virt.hardware [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 862.147731] env[69992]: DEBUG nova.virt.hardware [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 862.148406] env[69992]: DEBUG nova.virt.hardware [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 862.148406] env[69992]: DEBUG nova.virt.hardware [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 862.148406] env[69992]: DEBUG nova.virt.hardware [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 
tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 862.148538] env[69992]: DEBUG nova.virt.hardware [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 862.148695] env[69992]: DEBUG nova.virt.hardware [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 862.149589] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-924228e8-eafc-4644-a11e-3ceff6150b18 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.160976] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4fab6ca-93a9-4c8d-9000-48492a7645e4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.330677] env[69992]: DEBUG oslo_concurrency.lockutils [req-9332294a-13a5-4d2e-89b5-ba3a96be9bd4 req-6e56e475-74d8-47f1-a1d3-5e111964c41d service nova] Releasing lock "refresh_cache-93b78a8b-389c-4114-8c1d-da80146d80f3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 862.330798] env[69992]: DEBUG nova.compute.manager [req-9332294a-13a5-4d2e-89b5-ba3a96be9bd4 req-6e56e475-74d8-47f1-a1d3-5e111964c41d service nova] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Received event network-vif-plugged-fc767b62-dfd6-429e-84f0-140bda053ff7 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 862.332970] env[69992]: DEBUG oslo_concurrency.lockutils [req-9332294a-13a5-4d2e-89b5-ba3a96be9bd4 req-6e56e475-74d8-47f1-a1d3-5e111964c41d service nova] Acquiring lock "e934fc79-f7c5-4ca9-9f81-85467c1e9b45-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 862.332970] env[69992]: DEBUG oslo_concurrency.lockutils [req-9332294a-13a5-4d2e-89b5-ba3a96be9bd4 req-6e56e475-74d8-47f1-a1d3-5e111964c41d service nova] Lock "e934fc79-f7c5-4ca9-9f81-85467c1e9b45-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 862.332970] env[69992]: DEBUG oslo_concurrency.lockutils [req-9332294a-13a5-4d2e-89b5-ba3a96be9bd4 req-6e56e475-74d8-47f1-a1d3-5e111964c41d service nova] Lock "e934fc79-f7c5-4ca9-9f81-85467c1e9b45-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 862.332970] env[69992]: DEBUG nova.compute.manager [req-9332294a-13a5-4d2e-89b5-ba3a96be9bd4 req-6e56e475-74d8-47f1-a1d3-5e111964c41d service nova] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] 
No waiting events found dispatching network-vif-plugged-fc767b62-dfd6-429e-84f0-140bda053ff7 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 862.332970] env[69992]: WARNING nova.compute.manager [req-9332294a-13a5-4d2e-89b5-ba3a96be9bd4 req-6e56e475-74d8-47f1-a1d3-5e111964c41d service nova] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Received unexpected event network-vif-plugged-fc767b62-dfd6-429e-84f0-140bda053ff7 for instance with vm_state building and task_state spawning. [ 862.333475] env[69992]: DEBUG nova.compute.manager [req-9332294a-13a5-4d2e-89b5-ba3a96be9bd4 req-6e56e475-74d8-47f1-a1d3-5e111964c41d service nova] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Received event network-changed-fc767b62-dfd6-429e-84f0-140bda053ff7 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 862.333475] env[69992]: DEBUG nova.compute.manager [req-9332294a-13a5-4d2e-89b5-ba3a96be9bd4 req-6e56e475-74d8-47f1-a1d3-5e111964c41d service nova] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Refreshing instance network info cache due to event network-changed-fc767b62-dfd6-429e-84f0-140bda053ff7. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 862.333475] env[69992]: DEBUG oslo_concurrency.lockutils [req-9332294a-13a5-4d2e-89b5-ba3a96be9bd4 req-6e56e475-74d8-47f1-a1d3-5e111964c41d service nova] Acquiring lock "refresh_cache-e934fc79-f7c5-4ca9-9f81-85467c1e9b45" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.333475] env[69992]: DEBUG oslo_concurrency.lockutils [req-9332294a-13a5-4d2e-89b5-ba3a96be9bd4 req-6e56e475-74d8-47f1-a1d3-5e111964c41d service nova] Acquired lock "refresh_cache-e934fc79-f7c5-4ca9-9f81-85467c1e9b45" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 862.337514] env[69992]: DEBUG nova.network.neutron [req-9332294a-13a5-4d2e-89b5-ba3a96be9bd4 req-6e56e475-74d8-47f1-a1d3-5e111964c41d service nova] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Refreshing network info cache for port fc767b62-dfd6-429e-84f0-140bda053ff7 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 862.400542] env[69992]: DEBUG nova.network.neutron [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Successfully updated port: 1f86db68-8a81-421c-aa9b-4daab0584c4c {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 862.435447] env[69992]: DEBUG oslo_concurrency.lockutils [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.359s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 862.435914] env[69992]: DEBUG nova.compute.manager [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 862.439460] env[69992]: DEBUG oslo_concurrency.lockutils [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.048s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 862.445698] env[69992]: INFO nova.compute.claims [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 862.549468] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896592, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079372} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.549468] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 862.549468] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-465b0982-cc9e-4093-b7fe-10a1f442c98a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.557734] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for the task: (returnval){ [ 862.557734] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52fbe30a-c873-ac8c-60c6-bd590c7adf97" [ 862.557734] env[69992]: _type = "Task" [ 862.557734] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.570489] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52fbe30a-c873-ac8c-60c6-bd590c7adf97, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.765542] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Acquiring lock "e74441fc-361f-4e0b-bfdd-6f8213db51e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 862.765818] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Lock "e74441fc-361f-4e0b-bfdd-6f8213db51e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 862.766036] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Acquiring lock "e74441fc-361f-4e0b-bfdd-6f8213db51e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 862.766255] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Lock "e74441fc-361f-4e0b-bfdd-6f8213db51e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 862.772039] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Lock "e74441fc-361f-4e0b-bfdd-6f8213db51e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 862.772861] env[69992]: INFO nova.compute.manager [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Terminating instance [ 862.781941] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Acquiring lock "eba81db1-973c-4981-baca-cb98e4087510" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 862.782256] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Lock "eba81db1-973c-4981-baca-cb98e4087510" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 862.906565] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquiring lock "refresh_cache-ee4c0f2b-44cb-4b37-8e4a-5706b9932144" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.906721] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquired lock "refresh_cache-ee4c0f2b-44cb-4b37-8e4a-5706b9932144" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 862.907204] env[69992]: DEBUG nova.network.neutron [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 862.954457] env[69992]: DEBUG nova.compute.utils [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 862.957103] env[69992]: DEBUG nova.compute.manager [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 862.957103] env[69992]: DEBUG nova.network.neutron [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 863.041911] env[69992]: DEBUG nova.policy [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd0f7a6e9a76342a1a4fd39a8b21a31d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dc6fa4e45f4c47c49d67e6efe2eb7a50', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 863.076491] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52fbe30a-c873-ac8c-60c6-bd590c7adf97, 'name': SearchDatastore_Task, 'duration_secs': 0.026192} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.076491] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.076491] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 93b78a8b-389c-4114-8c1d-da80146d80f3/93b78a8b-389c-4114-8c1d-da80146d80f3.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 863.076491] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 863.076709] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 863.076709] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-8c40c205-7680-4ad8-8b44-06f35827144c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.076768] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1b40d5a-2ac9-4ca8-a326-8b6f38d70ad7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.088573] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for the task: (returnval){ [ 863.088573] env[69992]: value = "task-2896593" [ 863.088573] env[69992]: _type = "Task" [ 863.088573] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.096821] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 863.097274] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 863.101236] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e169e657-9e71-40a2-bbab-52138928c51f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.103471] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896593, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.109245] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Waiting for the task: (returnval){ [ 863.109245] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]523f7d85-f611-b018-7e40-a0af92c40716" [ 863.109245] env[69992]: _type = "Task" [ 863.109245] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.130022] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523f7d85-f611-b018-7e40-a0af92c40716, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.199898] env[69992]: DEBUG nova.network.neutron [req-9332294a-13a5-4d2e-89b5-ba3a96be9bd4 req-6e56e475-74d8-47f1-a1d3-5e111964c41d service nova] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Updated VIF entry in instance network info cache for port fc767b62-dfd6-429e-84f0-140bda053ff7. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 863.200354] env[69992]: DEBUG nova.network.neutron [req-9332294a-13a5-4d2e-89b5-ba3a96be9bd4 req-6e56e475-74d8-47f1-a1d3-5e111964c41d service nova] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Updating instance_info_cache with network_info: [{"id": "fc767b62-dfd6-429e-84f0-140bda053ff7", "address": "fa:16:3e:a4:a2:13", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc767b62-df", "ovs_interfaceid": "fc767b62-dfd6-429e-84f0-140bda053ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.287276] env[69992]: DEBUG nova.compute.manager [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 863.287276] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 863.287276] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9933eba3-b3d1-4a4c-8074-ddd6dd9717db {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.301297] env[69992]: DEBUG nova.compute.manager [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 863.304979] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Acquiring lock "c1d73002-6e69-41a6-95b3-34dccaf872ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.306251] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Lock "c1d73002-6e69-41a6-95b3-34dccaf872ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.311923] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 863.312511] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf51aa98-f068-41fa-872e-afaafb0c7f3c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.324286] env[69992]: DEBUG oslo_vmware.api [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Waiting for the task: (returnval){ [ 863.324286] env[69992]: value = "task-2896594" [ 863.324286] env[69992]: _type = "Task" [ 863.324286] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.333938] env[69992]: DEBUG oslo_vmware.api [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Task: {'id': task-2896594, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.417768] env[69992]: DEBUG nova.network.neutron [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Successfully updated port: 2584dc71-913f-4c9b-922c-f8b28530b82f {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 863.457694] env[69992]: DEBUG nova.compute.manager [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 863.467047] env[69992]: DEBUG nova.network.neutron [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 863.545102] env[69992]: DEBUG nova.network.neutron [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Successfully created port: e64de32e-0e37-4777-91e7-8be0da0fa147 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 863.603378] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896593, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.620568] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523f7d85-f611-b018-7e40-a0af92c40716, 'name': SearchDatastore_Task, 'duration_secs': 0.017821} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.626596] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05e09356-1a92-4ce4-a0dd-da621e12a1ff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.634471] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Waiting for the task: (returnval){ [ 863.634471] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]522188dd-c070-698e-b757-210457a88e05" [ 863.634471] env[69992]: _type = "Task" [ 863.634471] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.644111] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]522188dd-c070-698e-b757-210457a88e05, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.704166] env[69992]: DEBUG oslo_concurrency.lockutils [req-9332294a-13a5-4d2e-89b5-ba3a96be9bd4 req-6e56e475-74d8-47f1-a1d3-5e111964c41d service nova] Releasing lock "refresh_cache-e934fc79-f7c5-4ca9-9f81-85467c1e9b45" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.767614] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726624a5-aaf0-44ff-b413-fa45e3400b64 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.776090] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b86cd38-58ca-4bca-9139-9b3c9fd8f705 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.814911] env[69992]: DEBUG nova.compute.manager [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 863.819089] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c298ea4-01d7-4d32-9a33-74e76eef4bf4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.832307] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41034a43-2e37-4b01-bc51-2cd4b0276bb9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.840080] env[69992]: DEBUG oslo_vmware.api [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Task: {'id': task-2896594, 'name': PowerOffVM_Task, 'duration_secs': 0.251391} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.841376] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.841784] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 863.841996] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 863.843969] env[69992]: DEBUG nova.network.neutron [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Updating instance_info_cache with network_info: [{"id": "1f86db68-8a81-421c-aa9b-4daab0584c4c", "address": "fa:16:3e:1d:47:c9", "network": {"id": "3e77044c-b2d9-4469-8bae-4dbd1f752c9c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-482235377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da546e986828460e958e2eed165bf47e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f86db68-8a", "ovs_interfaceid": "1f86db68-8a81-421c-aa9b-4daab0584c4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.845998] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d5be7fd0-c2b9-4dff-99aa-56ef1c3876ee {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.858828] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquiring lock "a9274dfc-afbd-419b-a98b-053d71a05d7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.859018] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Lock "a9274dfc-afbd-419b-a98b-053d71a05d7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.859988] env[69992]: DEBUG nova.compute.provider_tree [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 863.921157] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "refresh_cache-1d436762-964d-40d9-871e-ee33c3ba25b5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.921375] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquired lock "refresh_cache-1d436762-964d-40d9-871e-ee33c3ba25b5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 863.921640] env[69992]: DEBUG nova.network.neutron [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 863.945943] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 863.945943] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 863.945943] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Deleting the datastore file [datastore2] e74441fc-361f-4e0b-bfdd-6f8213db51e3 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 863.945943] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a17d998-77d3-4f60-ba97-5cf2ee952912 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.953685] env[69992]: DEBUG oslo_vmware.api [None 
req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Waiting for the task: (returnval){ [ 863.953685] env[69992]: value = "task-2896596" [ 863.953685] env[69992]: _type = "Task" [ 863.953685] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.972126] env[69992]: DEBUG oslo_vmware.api [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Task: {'id': task-2896596, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.103629] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896593, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.700921} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.103629] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 93b78a8b-389c-4114-8c1d-da80146d80f3/93b78a8b-389c-4114-8c1d-da80146d80f3.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 864.103629] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 864.103903] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a7658564-c74b-4c3a-835a-f02cfdb4db50 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.113175] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for the task: (returnval){ [ 864.113175] env[69992]: value = "task-2896597" [ 864.113175] env[69992]: _type = "Task" [ 864.113175] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.124023] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896597, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.145628] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]522188dd-c070-698e-b757-210457a88e05, 'name': SearchDatastore_Task, 'duration_secs': 0.062624} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.145806] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 864.146130] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] e934fc79-f7c5-4ca9-9f81-85467c1e9b45/e934fc79-f7c5-4ca9-9f81-85467c1e9b45.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 864.147396] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 864.147396] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 864.147396] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c0fa103-d658-4bf7-bef1-e67c2d513806 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.149955] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-50b3d575-6f45-4679-b70a-f8cf3425160d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.157443] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Waiting for the task: (returnval){ [ 864.157443] env[69992]: value = "task-2896598" [ 864.157443] env[69992]: _type = "Task" [ 864.157443] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.161764] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 864.161764] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 864.163041] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c58b7f3c-b7c4-46da-80bb-081c05bc73af {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.168523] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Task: {'id': task-2896598, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.172339] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Waiting for the task: (returnval){ [ 864.172339] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5212e453-2d02-db88-b514-abf7e8df618d" [ 864.172339] env[69992]: _type = "Task" [ 864.172339] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.184665] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5212e453-2d02-db88-b514-abf7e8df618d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.335771] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 864.351732] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Releasing lock "refresh_cache-ee4c0f2b-44cb-4b37-8e4a-5706b9932144" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 864.352127] env[69992]: DEBUG nova.compute.manager [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Instance network_info: |[{"id": "1f86db68-8a81-421c-aa9b-4daab0584c4c", "address": "fa:16:3e:1d:47:c9", "network": {"id": "3e77044c-b2d9-4469-8bae-4dbd1f752c9c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-482235377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da546e986828460e958e2eed165bf47e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f86db68-8a", "ovs_interfaceid": "1f86db68-8a81-421c-aa9b-4daab0584c4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 864.352564] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:47:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f762954-6ca5-4da5-bf0a-5d31c51ec570', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1f86db68-8a81-421c-aa9b-4daab0584c4c', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 864.360319] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Creating folder: Project (da546e986828460e958e2eed165bf47e). Parent ref: group-v581821. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 864.360594] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e9eb1de-a9b9-41c5-9049-351b341098e6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.362940] env[69992]: DEBUG nova.scheduler.client.report [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 864.378185] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Created folder: Project (da546e986828460e958e2eed165bf47e) in parent group-v581821. [ 864.378391] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Creating folder: Instances. Parent ref: group-v581843. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 864.378620] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e4591f1-1bf0-4cf0-b8e8-ac2dba48aa60 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.392504] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Created folder: Instances in parent group-v581843. [ 864.392504] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 864.392504] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 864.392504] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-18d3f50e-aa07-4768-9c53-159fa30ec93e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.416641] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 864.416641] env[69992]: value = "task-2896601" [ 864.416641] env[69992]: _type = "Task" [ 864.416641] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.429112] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896601, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.466189] env[69992]: DEBUG oslo_vmware.api [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Task: {'id': task-2896596, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159367} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.466586] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 864.466697] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 864.466887] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 864.467136] env[69992]: INFO nova.compute.manager [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Took 1.18 seconds to destroy the instance on the hypervisor. [ 864.467381] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 864.467943] env[69992]: DEBUG nova.compute.manager [-] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 864.467943] env[69992]: DEBUG nova.network.neutron [-] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 864.474328] env[69992]: DEBUG nova.compute.manager [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 864.494359] env[69992]: DEBUG nova.network.neutron [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 864.515698] env[69992]: DEBUG nova.virt.hardware [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 864.516043] env[69992]: DEBUG nova.virt.hardware [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 864.516222] env[69992]: DEBUG nova.virt.hardware [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 864.516411] env[69992]: DEBUG nova.virt.hardware [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 864.516586] env[69992]: DEBUG nova.virt.hardware [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 864.516742] env[69992]: DEBUG nova.virt.hardware [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 864.516975] env[69992]: DEBUG nova.virt.hardware [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 864.517175] env[69992]: DEBUG nova.virt.hardware [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 864.517378] env[69992]: DEBUG nova.virt.hardware [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 864.517585] env[69992]: DEBUG nova.virt.hardware [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 864.517813] env[69992]: DEBUG nova.virt.hardware [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 864.518797] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff84ef54-7d01-4f20-98d6-af9512e23964 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.536172] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1634f99a-a7f5-4f23-bb8a-e7b27942e716 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.623132] env[69992]: DEBUG oslo_concurrency.lockutils [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquiring lock "27580836-7ab5-4e64-a985-3e6fc22a8b77" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 864.623390] env[69992]: DEBUG oslo_concurrency.lockutils [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lock "27580836-7ab5-4e64-a985-3e6fc22a8b77" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 864.632522] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896597, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074877} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.633711] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 864.633711] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d8efd9-8d2d-47e7-a9c4-d08b15668c0d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.663717] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] 93b78a8b-389c-4114-8c1d-da80146d80f3/93b78a8b-389c-4114-8c1d-da80146d80f3.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 864.668260] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3444e7f8-73b5-4a0e-a8d2-743130074961 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.703204] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Task: {'id': task-2896598, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.707282] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5212e453-2d02-db88-b514-abf7e8df618d, 'name': SearchDatastore_Task, 'duration_secs': 0.013659} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.707567] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for the task: (returnval){ [ 864.707567] env[69992]: value = "task-2896602" [ 864.707567] env[69992]: _type = "Task" [ 864.707567] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.708535] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d18cee66-8728-47a4-9ee5-8558ce91990d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.725761] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896602, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.725761] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Waiting for the task: (returnval){ [ 864.725761] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5202762e-51f4-734c-8fc5-4240bae65df4" [ 864.725761] env[69992]: _type = "Task" [ 864.725761] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.732801] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5202762e-51f4-734c-8fc5-4240bae65df4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.859466] env[69992]: DEBUG nova.network.neutron [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Updating instance_info_cache with network_info: [{"id": "2584dc71-913f-4c9b-922c-f8b28530b82f", "address": "fa:16:3e:42:68:21", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.191", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2584dc71-91", "ovs_interfaceid": "2584dc71-913f-4c9b-922c-f8b28530b82f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.868099] env[69992]: DEBUG oslo_concurrency.lockutils [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.429s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 864.868829] env[69992]: DEBUG nova.compute.manager [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 864.872053] env[69992]: DEBUG oslo_concurrency.lockutils [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.763s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 864.873326] env[69992]: INFO nova.compute.claims [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 864.929687] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896601, 'name': CreateVM_Task, 'duration_secs': 0.416806} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.929871] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 864.930578] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.930813] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 864.931250] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 864.931792] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5e2d0fb-fcab-435a-8556-17bd4a06e909 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.947261] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for the task: (returnval){ [ 864.947261] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5271d35c-310c-d643-f2a0-299b32dec23f" [ 864.947261] env[69992]: _type = "Task" [ 864.947261] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.958147] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5271d35c-310c-d643-f2a0-299b32dec23f, 'name': SearchDatastore_Task, 'duration_secs': 0.009974} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.958612] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 864.958865] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 864.959147] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.179911] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Task: {'id': task-2896598, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.562933} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.180422] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] e934fc79-f7c5-4ca9-9f81-85467c1e9b45/e934fc79-f7c5-4ca9-9f81-85467c1e9b45.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 865.180688] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 865.180941] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f721339a-ca17-432c-bdb4-f657d9c56aeb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.189136] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Waiting for the task: (returnval){ [ 865.189136] env[69992]: value = "task-2896603" [ 865.189136] env[69992]: _type = "Task" [ 865.189136] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.201315] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Task: {'id': task-2896603, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.220794] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896602, 'name': ReconfigVM_Task, 'duration_secs': 0.331588} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.221102] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Reconfigured VM instance instance-00000005 to attach disk [datastore1] 93b78a8b-389c-4114-8c1d-da80146d80f3/93b78a8b-389c-4114-8c1d-da80146d80f3.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 865.221750] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cfc20988-b938-401b-bd12-33d0285c2c26 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.233233] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for the task: (returnval){ [ 865.233233] env[69992]: value = "task-2896604" [ 865.233233] env[69992]: _type = "Task" [ 865.233233] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.233233] env[69992]: DEBUG nova.network.neutron [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Successfully updated port: e64de32e-0e37-4777-91e7-8be0da0fa147 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 865.238295] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5202762e-51f4-734c-8fc5-4240bae65df4, 'name': SearchDatastore_Task, 'duration_secs': 0.013012} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.240882] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 865.241241] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 068507bb-ee7a-44f7-b315-7d4b2b70e735/068507bb-ee7a-44f7-b315-7d4b2b70e735.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 865.241587] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.241810] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 865.242032] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9aeff01e-a3e9-4b28-b522-54a5f5706999 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.245063] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e8f16421-564e-4018-83cb-2cbb3fb5e283 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.252469] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896604, 'name': Rename_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.254045] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Waiting for the task: (returnval){ [ 865.254045] env[69992]: value = "task-2896605" [ 865.254045] env[69992]: _type = "Task" [ 865.254045] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.259611] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 865.259752] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 865.261414] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31edbb0d-fc5b-49b0-a4a0-3b8688843b3a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.275828] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Task: {'id': task-2896605, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.277730] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for the task: (returnval){ [ 865.277730] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d79d82-7e20-d403-749c-f83941baaf56" [ 865.277730] env[69992]: _type = "Task" [ 865.277730] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.287652] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d79d82-7e20-d403-749c-f83941baaf56, 'name': SearchDatastore_Task, 'duration_secs': 0.011042} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.288632] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efa22e18-e850-47bb-9ad0-fd0802dff402 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.295223] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for the task: (returnval){ [ 865.295223] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52e9b460-1938-d0d5-906c-4cada4c3e3b8" [ 865.295223] env[69992]: _type = "Task" [ 865.295223] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.307498] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e9b460-1938-d0d5-906c-4cada4c3e3b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.355614] env[69992]: DEBUG nova.network.neutron [-] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.365935] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Releasing lock "refresh_cache-1d436762-964d-40d9-871e-ee33c3ba25b5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 865.365935] env[69992]: DEBUG nova.compute.manager [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Instance network_info: |[{"id": "2584dc71-913f-4c9b-922c-f8b28530b82f", "address": "fa:16:3e:42:68:21", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.191", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2584dc71-91", "ovs_interfaceid": "2584dc71-913f-4c9b-922c-f8b28530b82f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 865.366230] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:68:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '07e9bef1-2b0e-4e4d-997f-de71bb0e213a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2584dc71-913f-4c9b-922c-f8b28530b82f', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 865.371932] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Creating folder: Project 
(93d4e973e49e4cf98096fa30ded68db1). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 865.372311] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa7106ff-5fbd-4738-a3d4-674a08c95476 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.377934] env[69992]: DEBUG nova.compute.utils [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 865.381178] env[69992]: DEBUG nova.compute.manager [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 865.381344] env[69992]: DEBUG nova.network.neutron [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 865.390670] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Created folder: Project (93d4e973e49e4cf98096fa30ded68db1) in parent group-v581821. [ 865.390670] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Creating folder: Instances. Parent ref: group-v581846. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 865.391508] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5571160f-4533-4429-b867-5ef77c5237be {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.411911] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Created folder: Instances in parent group-v581846. [ 865.411911] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 865.411911] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 865.411911] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc9bad9d-6e23-4748-b7f8-90ebdcb7d330 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.435130] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 865.435130] env[69992]: value = "task-2896608" [ 865.435130] env[69992]: _type = "Task" [ 865.435130] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.440405] env[69992]: DEBUG nova.policy [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5d33a6e0475f4b0d9a1e998a5e6f7e7e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '60fa5c5488fd4ac38c8c8556109c7413', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 865.453235] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896608, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.614970] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "9bab6bf7-43c8-4cc3-b484-4472f1acdf45" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.615384] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "9bab6bf7-43c8-4cc3-b484-4472f1acdf45" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.703795] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Task: {'id': task-2896603, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080174} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.703902] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 865.707060] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5aab8fa-6843-4bd6-bb65-9c9aeca0ed7b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.734038] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] e934fc79-f7c5-4ca9-9f81-85467c1e9b45/e934fc79-f7c5-4ca9-9f81-85467c1e9b45.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 865.734464] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3bee06af-9139-4f79-a6cf-0891d1ecc511 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.753328] env[69992]: DEBUG oslo_concurrency.lockutils [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "refresh_cache-e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.753937] env[69992]: DEBUG oslo_concurrency.lockutils [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "refresh_cache-e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.753937] env[69992]: DEBUG nova.network.neutron [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 865.774399] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896604, 'name': Rename_Task, 'duration_secs': 0.167814} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.780441] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 865.781872] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Waiting for the task: (returnval){ [ 865.781872] env[69992]: value = "task-2896609" [ 865.781872] env[69992]: _type = "Task" [ 865.781872] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.781872] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Task: {'id': task-2896605, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.782089] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-992da6d1-0cc9-4e4a-ba30-8f176ca81186 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.792137] env[69992]: DEBUG nova.network.neutron [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Successfully created port: 58835cdc-7ea4-4647-9a86-35f7cb486922 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 865.800707] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for the task: (returnval){ [ 865.800707] env[69992]: value = "task-2896610" [ 865.800707] env[69992]: _type = "Task" [ 865.800707] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.801551] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Task: {'id': task-2896609, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.815984] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896610, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.821193] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e9b460-1938-d0d5-906c-4cada4c3e3b8, 'name': SearchDatastore_Task, 'duration_secs': 0.010514} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.821478] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 865.821772] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] ee4c0f2b-44cb-4b37-8e4a-5706b9932144/ee4c0f2b-44cb-4b37-8e4a-5706b9932144.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 865.822082] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-086b6cd4-dc38-4cfe-a0d9-f1df37aeaa88 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.831128] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for the task: (returnval){ [ 865.831128] env[69992]: value = "task-2896611" [ 865.831128] env[69992]: _type = "Task" [ 865.831128] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.840668] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896611, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.861403] env[69992]: INFO nova.compute.manager [-] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Took 1.39 seconds to deallocate network for instance. [ 865.884794] env[69992]: DEBUG nova.compute.manager [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 865.950506] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896608, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.193969] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9560682c-ba74-43ef-9cad-67dfbb200585 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.203394] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a44ccc-4635-4521-85ab-030e43fe2d70 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.239981] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7e8d24-25e0-458d-8764-957109fe6901 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.249388] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114e6ee1-3462-4b55-a6e9-1e2a9d617643 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.267203] env[69992]: DEBUG nova.compute.provider_tree [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 866.278568] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Task: {'id': task-2896605, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552817} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.279425] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 068507bb-ee7a-44f7-b315-7d4b2b70e735/068507bb-ee7a-44f7-b315-7d4b2b70e735.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 866.279654] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 866.279910] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-55548724-06d8-4f40-9bbd-afc466143ba3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.290394] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Waiting for the task: (returnval){ [ 866.290394] env[69992]: value = "task-2896612" [ 866.290394] env[69992]: _type = "Task" [ 866.290394] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.298045] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Task: {'id': task-2896609, 'name': ReconfigVM_Task, 'duration_secs': 0.419126} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.299143] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Reconfigured VM instance instance-00000006 to attach disk [datastore1] e934fc79-f7c5-4ca9-9f81-85467c1e9b45/e934fc79-f7c5-4ca9-9f81-85467c1e9b45.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 866.300277] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aacf0cc3-cbe4-455a-a5d4-c9d73dbb01ac {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.304736] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Task: {'id': task-2896612, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.318256] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Waiting for the task: (returnval){ [ 866.318256] env[69992]: value = "task-2896613" [ 866.318256] env[69992]: _type = "Task" [ 866.318256] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.318877] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896610, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.329160] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Task: {'id': task-2896613, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.342389] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896611, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.369384] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 866.390416] env[69992]: DEBUG nova.network.neutron [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 866.446753] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896608, 'name': CreateVM_Task, 'duration_secs': 0.69149} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.446926] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 866.447611] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.447750] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.448596] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 866.448596] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9382e8f-2255-4a48-a321-f8dcb9e31ca6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.453693] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 866.453693] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52f3dc71-9b0a-396f-c011-700706946831" [ 866.453693] env[69992]: _type = "Task" [ 866.453693] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.462926] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52f3dc71-9b0a-396f-c011-700706946831, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.721341] env[69992]: DEBUG nova.network.neutron [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Updating instance_info_cache with network_info: [{"id": "e64de32e-0e37-4777-91e7-8be0da0fa147", "address": "fa:16:3e:c2:9f:3c", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape64de32e-0e", "ovs_interfaceid": "e64de32e-0e37-4777-91e7-8be0da0fa147", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.732986] env[69992]: DEBUG nova.compute.manager [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Received event network-changed-0f5065fc-9e45-41f9-a922-76f438876fea {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 866.733187] env[69992]: DEBUG nova.compute.manager [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Refreshing instance network info cache due to event network-changed-0f5065fc-9e45-41f9-a922-76f438876fea. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 866.733454] env[69992]: DEBUG oslo_concurrency.lockutils [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] Acquiring lock "refresh_cache-068507bb-ee7a-44f7-b315-7d4b2b70e735" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.733629] env[69992]: DEBUG oslo_concurrency.lockutils [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] Acquired lock "refresh_cache-068507bb-ee7a-44f7-b315-7d4b2b70e735" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.733921] env[69992]: DEBUG nova.network.neutron [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Refreshing network info cache for port 0f5065fc-9e45-41f9-a922-76f438876fea {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 866.781057] env[69992]: DEBUG nova.compute.manager [req-9d7b8b03-bff7-4346-9e25-fff4393cc1ac req-3b65c0b9-3578-48b4-b8e6-e1245ac58322 service nova] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Received event network-vif-plugged-2584dc71-913f-4c9b-922c-f8b28530b82f {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 866.781296] env[69992]: DEBUG oslo_concurrency.lockutils [req-9d7b8b03-bff7-4346-9e25-fff4393cc1ac req-3b65c0b9-3578-48b4-b8e6-e1245ac58322 service nova] Acquiring lock "1d436762-964d-40d9-871e-ee33c3ba25b5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 866.781488] env[69992]: DEBUG oslo_concurrency.lockutils [req-9d7b8b03-bff7-4346-9e25-fff4393cc1ac req-3b65c0b9-3578-48b4-b8e6-e1245ac58322 service nova] Lock "1d436762-964d-40d9-871e-ee33c3ba25b5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 866.781650] env[69992]: DEBUG oslo_concurrency.lockutils [req-9d7b8b03-bff7-4346-9e25-fff4393cc1ac req-3b65c0b9-3578-48b4-b8e6-e1245ac58322 service nova] Lock "1d436762-964d-40d9-871e-ee33c3ba25b5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 866.781806] env[69992]: DEBUG nova.compute.manager [req-9d7b8b03-bff7-4346-9e25-fff4393cc1ac req-3b65c0b9-3578-48b4-b8e6-e1245ac58322 service nova] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] No waiting events found dispatching network-vif-plugged-2584dc71-913f-4c9b-922c-f8b28530b82f {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 866.781964] env[69992]: WARNING nova.compute.manager [req-9d7b8b03-bff7-4346-9e25-fff4393cc1ac req-3b65c0b9-3578-48b4-b8e6-e1245ac58322 service nova] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Received unexpected event network-vif-plugged-2584dc71-913f-4c9b-922c-f8b28530b82f for instance with vm_state building and task_state spawning. 
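The recurring "Waiting for the task: (returnval){ value = "task-..." }" blocks and the "progress is N%" / "completed successfully" records above all come from oslo.vmware's task-polling helper: the driver kicks off a vCenter task (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, PowerOnVM_Task, ...) and then blocks in wait_for_task(), which polls the task object until vCenter reports success. The following is a minimal sketch of that invoke-and-poll pattern, not Nova's actual vm_util.create_vm; the session is an oslo.vmware VMwareAPISession created elsewhere, and folder_ref / config_spec / respool_ref are placeholder managed-object references.

```python
from oslo_vmware import api as vmware_api  # noqa: F401  (type of `session`)


def create_vm_and_wait(session, folder_ref, config_spec, respool_ref):
    """Invoke Folder.CreateVM_Task and block until vCenter finishes it.

    `session` is an oslo.vmware VMwareAPISession; the *_ref/config arguments
    are placeholders for managed-object references and a VirtualMachineConfigSpec
    obtained elsewhere.
    """
    # Start the vCenter task through the SOAP Vim interface; this corresponds
    # to the "Invoking Folder.CreateVM_Task" records in the log.
    task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                              config=config_spec, pool=respool_ref)
    # wait_for_task() repeatedly polls the task (the _poll_task /
    # "progress is N%" lines) and returns the task info once the task reaches
    # 'success', raising an oslo.vmware exception if the task fails.
    return session.wait_for_task(task)
```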
[ 866.802138] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Task: {'id': task-2896612, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.099516} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.803138] env[69992]: ERROR nova.scheduler.client.report [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [req-229947d6-3c08-49c7-9cb0-f4853fb3bfd0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-229947d6-3c08-49c7-9cb0-f4853fb3bfd0"}]} [ 866.803511] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 866.806285] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e842ab-c1f6-42a3-b278-0800eb89fa42 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.817696] env[69992]: DEBUG oslo_vmware.api [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896610, 'name': PowerOnVM_Task, 'duration_secs': 0.586603} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.827089] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 866.827360] env[69992]: INFO nova.compute.manager [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Took 17.71 seconds to spawn the instance on the hypervisor. 
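The 409 "placement.concurrent_update" error above is placement's optimistic-concurrency check: PUT /resource_providers/{uuid}/inventories must carry the provider's current resource_provider_generation, and a stale generation is rejected, after which the scheduler report client refreshes its view of the provider and retries (the "Refreshing inventories for resource provider ..." records that follow). The sketch below illustrates that read-modify-write loop against the placement HTTP API; it is not Nova's report client, and PLACEMENT_URL and the token header are placeholders for a real endpoint and credentials.

```python
import requests

PLACEMENT_URL = 'http://placement.example.org'   # placeholder endpoint
HEADERS = {'X-Auth-Token': 'TOKEN',              # placeholder auth token
           'OpenStack-API-Version': 'placement 1.26'}


def set_inventory(rp_uuid, inventories, retries=3):
    """Update a resource provider's inventory, retrying on generation conflicts."""
    for _ in range(retries):
        # Read the provider to learn its current generation.
        rp = requests.get(f'{PLACEMENT_URL}/resource_providers/{rp_uuid}',
                          headers=HEADERS).json()
        payload = {'resource_provider_generation': rp['generation'],
                   'inventories': inventories}
        resp = requests.put(
            f'{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories',
            json=payload, headers=HEADERS)
        if resp.status_code != 409:
            return resp
        # 409 placement.concurrent_update: another writer bumped the
        # generation first; loop to re-read it and try again.
    raise RuntimeError('inventory update kept conflicting; giving up')
```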
[ 866.827548] env[69992]: DEBUG nova.compute.manager [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 866.836334] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 068507bb-ee7a-44f7-b315-7d4b2b70e735/068507bb-ee7a-44f7-b315-7d4b2b70e735.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 866.837372] env[69992]: DEBUG nova.scheduler.client.report [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Refreshing inventories for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 866.842175] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22be4ca8-833c-4dce-b16a-fba94ea0e41e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.847100] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2248f98f-6541-49e6-a5b8-8b37ea1915c9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.874316] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Task: {'id': task-2896613, 'name': Rename_Task, 'duration_secs': 0.356279} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.878026] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 866.878026] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Waiting for the task: (returnval){ [ 866.878026] env[69992]: value = "task-2896614" [ 866.878026] env[69992]: _type = "Task" [ 866.878026] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.881877] env[69992]: DEBUG nova.scheduler.client.report [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Updating ProviderTree inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 866.882235] env[69992]: DEBUG nova.compute.provider_tree [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 866.884830] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4907a50b-3f8c-423f-b818-7278d597838a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.895790] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896611, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.555983} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.896731] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] ee4c0f2b-44cb-4b37-8e4a-5706b9932144/ee4c0f2b-44cb-4b37-8e4a-5706b9932144.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 866.896731] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 866.897940] env[69992]: DEBUG nova.compute.manager [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 866.907512] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d3ecc4fe-a7f9-4afd-994e-aaea2dc63a9a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.911657] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Waiting for the task: (returnval){ [ 866.911657] env[69992]: value = "task-2896615" [ 866.911657] env[69992]: _type = "Task" [ 866.911657] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.911909] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Task: {'id': task-2896614, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.919172] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for the task: (returnval){ [ 866.919172] env[69992]: value = "task-2896616" [ 866.919172] env[69992]: _type = "Task" [ 866.919172] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.923797] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Task: {'id': task-2896615, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.925429] env[69992]: DEBUG nova.scheduler.client.report [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Refreshing aggregate associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, aggregates: None {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 866.943996] env[69992]: DEBUG nova.virt.hardware [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 866.944853] env[69992]: DEBUG nova.virt.hardware [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 866.944853] env[69992]: DEBUG nova.virt.hardware [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 866.944853] env[69992]: DEBUG nova.virt.hardware [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 866.944853] env[69992]: DEBUG nova.virt.hardware [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 866.945057] env[69992]: DEBUG nova.virt.hardware [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 866.945275] env[69992]: DEBUG nova.virt.hardware [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 866.945487] env[69992]: DEBUG nova.virt.hardware [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 866.945713] env[69992]: DEBUG nova.virt.hardware [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 866.946245] env[69992]: DEBUG nova.virt.hardware [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 866.946245] env[69992]: DEBUG nova.virt.hardware [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 866.947348] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a0052fb-94b2-435c-a808-68fda6e1be22 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.954988] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896616, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.962979] env[69992]: DEBUG nova.scheduler.client.report [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Refreshing trait associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 866.968045] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-927b69c1-598f-40ae-b89a-10d5f0417d8c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.978327] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52f3dc71-9b0a-396f-c011-700706946831, 'name': SearchDatastore_Task, 'duration_secs': 0.024304} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.979201] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.979459] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 866.979712] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.979866] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.980106] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 866.980620] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14cc8c05-ada9-4354-b773-5f4fa9d2fc56 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.003049] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 867.003049] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 867.003406] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99f742c4-ebad-4587-b2d1-09884cdfd086 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.014047] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 867.014047] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a8ad61-9f09-dd08-84f8-3e7a68823ab9" [ 867.014047] env[69992]: _type = "Task" [ 867.014047] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.023428] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a8ad61-9f09-dd08-84f8-3e7a68823ab9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.192310] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "27492ef7-8258-4001-b3b3-5bcb94e12c1f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 867.192786] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "27492ef7-8258-4001-b3b3-5bcb94e12c1f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.226221] env[69992]: DEBUG oslo_concurrency.lockutils [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "refresh_cache-e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.226221] env[69992]: DEBUG nova.compute.manager [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Instance network_info: |[{"id": "e64de32e-0e37-4777-91e7-8be0da0fa147", "address": "fa:16:3e:c2:9f:3c", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 
8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape64de32e-0e", "ovs_interfaceid": "e64de32e-0e37-4777-91e7-8be0da0fa147", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 867.226666] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:9f:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ead20342-9afa-435e-a22b-b4a903457712', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e64de32e-0e37-4777-91e7-8be0da0fa147', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 867.234891] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Creating folder: Project (dc6fa4e45f4c47c49d67e6efe2eb7a50). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 867.239013] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d2b5e25-f837-4cac-a589-548ac3f1683d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.255610] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Created folder: Project (dc6fa4e45f4c47c49d67e6efe2eb7a50) in parent group-v581821. [ 867.255890] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Creating folder: Instances. Parent ref: group-v581849. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 867.256178] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bbcaec6d-5e8e-433c-90c6-9b1ff6b7165e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.271832] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Created folder: Instances in parent group-v581849. [ 867.272100] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 867.272294] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 867.272495] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f5a2f875-cf5a-4026-8f7b-6405fd4190be {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.297520] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 867.297520] env[69992]: value = "task-2896619" [ 867.297520] env[69992]: _type = "Task" [ 867.297520] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.307595] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896619, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.371134] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd89b20b-cd6b-4dbc-8726-9be6de4ef6af {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.380517] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad25177-754c-4b74-b8cb-41464288a0be {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.426997] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf56e58-33f9-497a-8e7a-942d388b86aa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.434124] env[69992]: INFO nova.compute.manager [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Took 25.43 seconds to build instance. [ 867.444033] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Task: {'id': task-2896614, 'name': ReconfigVM_Task, 'duration_secs': 0.37994} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.453892] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 068507bb-ee7a-44f7-b315-7d4b2b70e735/068507bb-ee7a-44f7-b315-7d4b2b70e735.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 867.454942] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Task: {'id': task-2896615, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.455792] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d1a5d003-5036-4f80-877b-5d47310dd51b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.459868] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c867ff97-659b-45f4-983b-726d617e272c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.468071] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896616, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076343} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.468992] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 867.469985] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea678736-81b2-4856-8aee-1acdfa496fc3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.481940] env[69992]: DEBUG nova.compute.provider_tree [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 867.486157] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Waiting for the task: (returnval){ [ 867.486157] env[69992]: value = "task-2896620" [ 867.486157] env[69992]: _type = "Task" [ 867.486157] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.510589] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] ee4c0f2b-44cb-4b37-8e4a-5706b9932144/ee4c0f2b-44cb-4b37-8e4a-5706b9932144.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 867.513275] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94c721e7-29d0-4b1f-a47f-196527a91c83 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.532593] env[69992]: DEBUG nova.network.neutron [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Successfully updated port: 58835cdc-7ea4-4647-9a86-35f7cb486922 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 867.535064] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Task: {'id': task-2896620, 'name': Rename_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.546968] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a8ad61-9f09-dd08-84f8-3e7a68823ab9, 'name': SearchDatastore_Task, 'duration_secs': 0.019642} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.551206] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for the task: (returnval){ [ 867.551206] env[69992]: value = "task-2896621" [ 867.551206] env[69992]: _type = "Task" [ 867.551206] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.551512] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01e3640d-dab1-4cc8-8a96-e60142ece6a4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.567220] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 867.567220] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a91cae-3f55-187e-9871-4e96902c06eb" [ 867.567220] env[69992]: _type = "Task" [ 867.567220] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.570550] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896621, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.580618] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a91cae-3f55-187e-9871-4e96902c06eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.808963] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896619, 'name': CreateVM_Task, 'duration_secs': 0.401581} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.809439] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 867.810860] env[69992]: DEBUG oslo_concurrency.lockutils [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.811294] env[69992]: DEBUG oslo_concurrency.lockutils [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 867.812025] env[69992]: DEBUG oslo_concurrency.lockutils [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 867.812478] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af642f43-0e9d-4e84-b84f-7a5c4dc0dc97 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.819913] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 867.819913] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]525eee6e-a5cd-3e91-a37a-1ed9b5c2b7d4" [ 867.819913] env[69992]: _type = "Task" [ 867.819913] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.830579] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525eee6e-a5cd-3e91-a37a-1ed9b5c2b7d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.859089] env[69992]: DEBUG nova.network.neutron [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Updated VIF entry in instance network info cache for port 0f5065fc-9e45-41f9-a922-76f438876fea. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 867.859456] env[69992]: DEBUG nova.network.neutron [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Updating instance_info_cache with network_info: [{"id": "0f5065fc-9e45-41f9-a922-76f438876fea", "address": "fa:16:3e:de:43:7f", "network": {"id": "78124c99-daa7-4df7-a2d7-c611435a8b0a", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-508185452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f02818bc6a9c48b0810968803c8be436", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2bf584a-b4a3-4e7a-b0b7-eb8a2bc5a11d", "external-id": "nsx-vlan-transportzone-286", "segmentation_id": 286, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f5065fc-9e", "ovs_interfaceid": "0f5065fc-9e45-41f9-a922-76f438876fea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.938035] env[69992]: DEBUG oslo_vmware.api [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Task: {'id': task-2896615, 'name': PowerOnVM_Task, 'duration_secs': 0.975863} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.938035] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 867.938035] env[69992]: INFO nova.compute.manager [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Took 16.44 seconds to spawn the instance on the hypervisor. 
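The "Waiting for the task", "progress is N%" and "completed successfully" records above come from the wait_for_task/_poll_task helpers in oslo_vmware/api.py, as the file and line tags on each record indicate. The following is a minimal, hypothetical sketch of that polling pattern only; it is not oslo_vmware's implementation, and TaskInfo, get_task_info() and wait_for_task_simplified() are illustrative names invented for this example.

import itertools
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    """Simplified stand-in for the vCenter TaskInfo object."""
    state: str      # 'running', 'success' or 'error'
    progress: int   # 0-100, echoed in the "_poll_task ... progress is N%" lines


# Canned progress values so the sketch runs without a real vCenter;
# a real caller would read TaskInfo from the vSphere API instead.
_FAKE_PROGRESS = itertools.chain([0, 14, 66, 89], itertools.repeat(100))


def get_task_info(task_ref: str) -> TaskInfo:
    progress = next(_FAKE_PROGRESS)
    state = "success" if progress >= 100 else "running"
    return TaskInfo(state=state, progress=progress)


def wait_for_task_simplified(task_ref: str, interval: float = 0.1) -> TaskInfo:
    """Poll a task such as 'task-2896619' until it reaches a terminal state."""
    while True:
        info = get_task_info(task_ref)
        if info.state == "success":
            print(f"Task {task_ref} completed successfully")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_ref} failed")
        print(f"Task {task_ref} progress is {info.progress}%")
        time.sleep(interval)


if __name__ == "__main__":
    wait_for_task_simplified("task-2896619")

In oslo_vmware the polling is driven by a looping-call timer rather than a bare sleep, but the control flow is the same: poll, log progress, stop on success or raise on error, which is why each task in this log produces a short run of progress records followed by a single "completed successfully" record.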
[ 867.938035] env[69992]: DEBUG nova.compute.manager [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 867.938035] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-843f024e-6e00-40aa-8a80-75073679135f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.940436] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aa21e319-e44c-4072-ae77-8599a5eecd78 tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Lock "93b78a8b-389c-4114-8c1d-da80146d80f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.969s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.005806] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Task: {'id': task-2896620, 'name': Rename_Task, 'duration_secs': 0.222385} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.006313] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 868.007197] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-42a1beea-7df1-4a70-93d7-26c3c933be25 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.020769] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Waiting for the task: (returnval){ [ 868.020769] env[69992]: value = "task-2896622" [ 868.020769] env[69992]: _type = "Task" [ 868.020769] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.028423] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Task: {'id': task-2896622, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.030070] env[69992]: DEBUG nova.scheduler.client.report [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 22 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 868.030070] env[69992]: DEBUG nova.compute.provider_tree [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 22 to 23 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 868.030070] env[69992]: DEBUG nova.compute.provider_tree [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 868.036412] env[69992]: DEBUG oslo_concurrency.lockutils [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Acquiring lock "refresh_cache-1d5722e1-5a48-4212-bbc7-527a3739db6e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.036412] env[69992]: DEBUG oslo_concurrency.lockutils [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Acquired lock "refresh_cache-1d5722e1-5a48-4212-bbc7-527a3739db6e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.036412] env[69992]: DEBUG nova.network.neutron [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 868.068200] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896621, 'name': ReconfigVM_Task, 'duration_secs': 0.447568} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.068200] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Reconfigured VM instance instance-00000008 to attach disk [datastore1] ee4c0f2b-44cb-4b37-8e4a-5706b9932144/ee4c0f2b-44cb-4b37-8e4a-5706b9932144.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 868.069088] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4aff45f1-e4da-4538-bafd-fca91d4e363f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.083043] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a91cae-3f55-187e-9871-4e96902c06eb, 'name': SearchDatastore_Task, 'duration_secs': 0.024597} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.084575] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 868.085513] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 1d436762-964d-40d9-871e-ee33c3ba25b5/1d436762-964d-40d9-871e-ee33c3ba25b5.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 868.085513] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for the task: (returnval){ [ 868.085513] env[69992]: value = "task-2896623" [ 868.085513] env[69992]: _type = "Task" [ 868.085513] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.086136] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-21966e25-0c89-463d-8023-b84227d07fa7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.099536] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896623, 'name': Rename_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.102061] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 868.102061] env[69992]: value = "task-2896624" [ 868.102061] env[69992]: _type = "Task" [ 868.102061] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.113208] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896624, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.336472] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525eee6e-a5cd-3e91-a37a-1ed9b5c2b7d4, 'name': SearchDatastore_Task, 'duration_secs': 0.012512} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.336790] env[69992]: DEBUG oslo_concurrency.lockutils [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 868.336921] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 868.337518] env[69992]: DEBUG oslo_concurrency.lockutils [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.337518] env[69992]: DEBUG oslo_concurrency.lockutils [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.337518] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 868.337755] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-ca533e3a-4e8c-4d7d-82d5-aaffab257f49 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.358018] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 868.358396] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 868.360016] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb2cdc96-2757-4d22-89b0-8e39ff19f8ad {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.368209] env[69992]: DEBUG oslo_concurrency.lockutils [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] Releasing lock "refresh_cache-068507bb-ee7a-44f7-b315-7d4b2b70e735" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 868.369162] env[69992]: DEBUG nova.compute.manager [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Received event network-changed-c01a5abb-0c56-4377-ab40-619062fc6092 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 868.369442] env[69992]: DEBUG nova.compute.manager [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Refreshing instance network info cache due to event network-changed-c01a5abb-0c56-4377-ab40-619062fc6092. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 868.373027] env[69992]: DEBUG oslo_concurrency.lockutils [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] Acquiring lock "refresh_cache-fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.373027] env[69992]: DEBUG oslo_concurrency.lockutils [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] Acquired lock "refresh_cache-fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.373027] env[69992]: DEBUG nova.network.neutron [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Refreshing network info cache for port c01a5abb-0c56-4377-ab40-619062fc6092 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 868.373679] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 868.373679] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528f7761-b515-944f-ac58-0390b4ecb9d5" [ 868.373679] env[69992]: _type = "Task" [ 868.373679] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.384130] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528f7761-b515-944f-ac58-0390b4ecb9d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.445110] env[69992]: DEBUG nova.compute.manager [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 868.463250] env[69992]: INFO nova.compute.manager [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Took 26.37 seconds to build instance. [ 868.532118] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Task: {'id': task-2896622, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.535380] env[69992]: DEBUG oslo_concurrency.lockutils [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.664s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.535845] env[69992]: DEBUG nova.compute.manager [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 868.538535] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.698s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.540156] env[69992]: INFO nova.compute.claims [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 868.605549] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896623, 'name': Rename_Task, 'duration_secs': 0.20158} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.608461] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 868.609410] env[69992]: DEBUG nova.network.neutron [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 868.611676] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd1f3818-7790-47c5-b811-b96c20a98c7d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.621923] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896624, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.623839] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for the task: (returnval){ [ 868.623839] env[69992]: value = "task-2896625" [ 868.623839] env[69992]: _type = "Task" [ 868.623839] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.634841] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896625, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.885708] env[69992]: DEBUG nova.network.neutron [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Updating instance_info_cache with network_info: [{"id": "58835cdc-7ea4-4647-9a86-35f7cb486922", "address": "fa:16:3e:27:ed:df", "network": {"id": "47290065-7498-4833-a617-c1038575b524", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1460659213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60fa5c5488fd4ac38c8c8556109c7413", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58835cdc-7e", "ovs_interfaceid": "58835cdc-7ea4-4647-9a86-35f7cb486922", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.894176] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528f7761-b515-944f-ac58-0390b4ecb9d5, 'name': SearchDatastore_Task, 'duration_secs': 0.058948} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.895146] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-698c9082-ddf7-4dd2-a8bf-58d8540979ea {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.905375] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 868.905375] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]520e058a-0b8a-43a8-8db9-c4807278efb0" [ 868.905375] env[69992]: _type = "Task" [ 868.905375] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.921667] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]520e058a-0b8a-43a8-8db9-c4807278efb0, 'name': SearchDatastore_Task, 'duration_secs': 0.013952} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.922185] env[69992]: DEBUG oslo_concurrency.lockutils [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 868.922683] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] e5d9de80-1ee5-462a-8459-168fd60e1972/e5d9de80-1ee5-462a-8459-168fd60e1972.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 868.923159] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5b2ee8a-17ce-41a8-ac67-9fe514004c00 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.953181] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 868.953181] env[69992]: value = "task-2896626" [ 868.953181] env[69992]: _type = "Task" [ 868.953181] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.971195] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ca930d4-b582-4483-afe2-2571e7d5a46e tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Lock "e934fc79-f7c5-4ca9-9f81-85467c1e9b45" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.889s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.971554] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2896626, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.973635] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.033668] env[69992]: DEBUG oslo_vmware.api [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Task: {'id': task-2896622, 'name': PowerOnVM_Task, 'duration_secs': 0.630304} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.034351] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 869.034609] env[69992]: INFO nova.compute.manager [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Took 15.22 seconds to spawn the instance on the hypervisor. 
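The lock records that recur throughout this log ("Acquiring lock ... by ...", 'Lock ... acquired by ... :: waited', 'Lock ... "released" by ... :: held') are emitted by the oslo_concurrency.lockutils wrapper named in their file and line tags. Below is a minimal sketch of code that produces the same three messages, assuming oslo.concurrency is installed; the claim_resources() function and its body are illustrative, and only the lock name is taken from the records above.

import logging
import time

from oslo_concurrency import lockutils

# Surface lockutils' DEBUG messages so acquire/release records comparable to
# the ones in this log become visible on the console.
logging.basicConfig(level=logging.DEBUG)


@lockutils.synchronized("compute_resources")
def claim_resources():
    """Illustrative critical section; Nova's ResourceTracker claims inventory
    under this same lock name before an instance build proceeds."""
    time.sleep(0.05)


if __name__ == "__main__":
    claim_resources()

Because the decorator serializes callers on the lock name, the "waited" value in the acquired record grows whenever another request already holds the lock, which is how waits such as the 4.698s seen for "compute_resources" below arise.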
[ 869.034788] env[69992]: DEBUG nova.compute.manager [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 869.036114] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6685720-ef48-4a37-8309-421a21e620b1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.054958] env[69992]: DEBUG nova.compute.utils [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 869.057932] env[69992]: DEBUG nova.compute.manager [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 869.057932] env[69992]: DEBUG nova.network.neutron [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 869.124147] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896624, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.556547} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.129481] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 1d436762-964d-40d9-871e-ee33c3ba25b5/1d436762-964d-40d9-871e-ee33c3ba25b5.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 869.129602] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 869.130682] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d17ff035-232d-40c7-9c5f-621e5d4ae4ee {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.144201] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896625, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.149937] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 869.149937] env[69992]: value = "task-2896627" [ 869.149937] env[69992]: _type = "Task" [ 869.149937] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.151599] env[69992]: DEBUG nova.policy [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4f8ef8c12616427f96013193b4ef0c8b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '662852335b0d4a50ac0e0afb0a9f58dc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 869.164690] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896627, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.245748] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "a49b4721-e338-4e60-b91e-137caa3c9c03" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.246090] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "a49b4721-e338-4e60-b91e-137caa3c9c03" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.314304] env[69992]: DEBUG nova.network.neutron [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Updated VIF entry in instance network info cache for port c01a5abb-0c56-4377-ab40-619062fc6092. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 869.314740] env[69992]: DEBUG nova.network.neutron [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Updating instance_info_cache with network_info: [{"id": "c01a5abb-0c56-4377-ab40-619062fc6092", "address": "fa:16:3e:bb:32:3c", "network": {"id": "1f29edc7-882e-4623-86e7-3d4c6cd47bdd", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1063426455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fbe8c17ad0154c55a053d464c46a4857", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92233552-2c0c-416e-9bf3-bfcca8eda2dc", "external-id": "nsx-vlan-transportzone-251", "segmentation_id": 251, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc01a5abb-0c", "ovs_interfaceid": "c01a5abb-0c56-4377-ab40-619062fc6092", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.390121] env[69992]: DEBUG oslo_concurrency.lockutils [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Releasing lock "refresh_cache-1d5722e1-5a48-4212-bbc7-527a3739db6e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.390472] env[69992]: DEBUG nova.compute.manager [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Instance network_info: |[{"id": "58835cdc-7ea4-4647-9a86-35f7cb486922", "address": "fa:16:3e:27:ed:df", "network": {"id": "47290065-7498-4833-a617-c1038575b524", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1460659213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60fa5c5488fd4ac38c8c8556109c7413", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58835cdc-7e", "ovs_interfaceid": "58835cdc-7ea4-4647-9a86-35f7cb486922", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 869.393862] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:ed:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b8af79a-31d5-4d78-93d7-3919aa1d9186', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '58835cdc-7ea4-4647-9a86-35f7cb486922', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 869.399825] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Creating folder: Project (60fa5c5488fd4ac38c8c8556109c7413). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 869.400534] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0e3f5b3-c713-446c-abb9-ca5e79ec69c4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.418829] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Created folder: Project (60fa5c5488fd4ac38c8c8556109c7413) in parent group-v581821. [ 869.419054] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Creating folder: Instances. Parent ref: group-v581852. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 869.419411] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0eb99260-0890-451a-8716-94191fe76245 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.438795] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Created folder: Instances in parent group-v581852. [ 869.439220] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 869.439467] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 869.439714] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed77c63a-6059-4931-b9f1-dc770006d631 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.473769] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2896626, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.475342] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 869.475342] env[69992]: value = "task-2896630" [ 869.475342] env[69992]: _type = "Task" [ 869.475342] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.476096] env[69992]: DEBUG nova.compute.manager [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 869.487668] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896630, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.569828] env[69992]: DEBUG nova.compute.manager [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 869.579131] env[69992]: INFO nova.compute.manager [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Took 22.40 seconds to build instance. [ 869.648793] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896625, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.667613] env[69992]: DEBUG nova.network.neutron [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Successfully created port: ac8aea81-104c-4dc6-a761-379a3d5a7b2d {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 869.676533] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896627, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.214704} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.676741] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 869.678355] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1703f2f-3ccb-463f-a2d0-9639f60c371d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.707548] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 1d436762-964d-40d9-871e-ee33c3ba25b5/1d436762-964d-40d9-871e-ee33c3ba25b5.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 869.710771] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91a05953-ba44-4dcc-921b-15d525111e36 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.735467] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 869.735467] env[69992]: value = "task-2896631" [ 869.735467] env[69992]: _type = "Task" [ 869.735467] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.747925] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896631, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.820315] env[69992]: DEBUG oslo_concurrency.lockutils [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] Releasing lock "refresh_cache-fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.820627] env[69992]: DEBUG nova.compute.manager [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Received event network-vif-plugged-1f86db68-8a81-421c-aa9b-4daab0584c4c {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 869.820910] env[69992]: DEBUG oslo_concurrency.lockutils [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] Acquiring lock "ee4c0f2b-44cb-4b37-8e4a-5706b9932144-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.821176] env[69992]: DEBUG oslo_concurrency.lockutils [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] Lock "ee4c0f2b-44cb-4b37-8e4a-5706b9932144-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.821394] env[69992]: DEBUG oslo_concurrency.lockutils [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] Lock "ee4c0f2b-44cb-4b37-8e4a-5706b9932144-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.821617] env[69992]: DEBUG nova.compute.manager [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] No waiting events found dispatching network-vif-plugged-1f86db68-8a81-421c-aa9b-4daab0584c4c {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 869.821861] env[69992]: WARNING nova.compute.manager [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Received unexpected event network-vif-plugged-1f86db68-8a81-421c-aa9b-4daab0584c4c for instance with vm_state building and task_state spawning. [ 869.822093] env[69992]: DEBUG nova.compute.manager [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Received event network-changed-1f86db68-8a81-421c-aa9b-4daab0584c4c {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 869.822310] env[69992]: DEBUG nova.compute.manager [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Refreshing instance network info cache due to event network-changed-1f86db68-8a81-421c-aa9b-4daab0584c4c. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 869.822542] env[69992]: DEBUG oslo_concurrency.lockutils [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] Acquiring lock "refresh_cache-ee4c0f2b-44cb-4b37-8e4a-5706b9932144" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.822689] env[69992]: DEBUG oslo_concurrency.lockutils [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] Acquired lock "refresh_cache-ee4c0f2b-44cb-4b37-8e4a-5706b9932144" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 869.822847] env[69992]: DEBUG nova.network.neutron [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Refreshing network info cache for port 1f86db68-8a81-421c-aa9b-4daab0584c4c {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 869.923141] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a122040-7d39-4a9b-a0d8-cd5650e917d9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.948641] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e9f22c1-ff62-4d11-9ca5-570ff6b8a1c9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.004478] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c858516e-7229-4e60-a8b6-e4fe1ca15904 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.013489] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2896626, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57459} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.014773] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] e5d9de80-1ee5-462a-8459-168fd60e1972/e5d9de80-1ee5-462a-8459-168fd60e1972.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 870.015012] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 870.015311] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f9733d84-9985-4c91-9ef7-c9d218cab25f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.021817] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896630, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.027846] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23996808-0af9-469c-8e23-5fcdeaa39208 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.036597] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 870.036597] env[69992]: value = "task-2896632" [ 870.036597] env[69992]: _type = "Task" [ 870.036597] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.038029] env[69992]: DEBUG oslo_concurrency.lockutils [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.059030] env[69992]: DEBUG nova.compute.provider_tree [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 870.065565] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2896632, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.083715] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0741d9aa-5f32-477d-909d-2aba026c7aa6 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Lock "068507bb-ee7a-44f7-b315-7d4b2b70e735" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.917s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.152168] env[69992]: DEBUG oslo_vmware.api [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896625, 'name': PowerOnVM_Task, 'duration_secs': 1.45708} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.152424] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 870.152621] env[69992]: INFO nova.compute.manager [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Took 10.42 seconds to spawn the instance on the hypervisor. [ 870.152801] env[69992]: DEBUG nova.compute.manager [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 870.153889] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0eeb46-7730-42ce-b5f7-345a1ee69b14 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.171517] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] Acquiring lock "93b78a8b-389c-4114-8c1d-da80146d80f3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.171881] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] Lock "93b78a8b-389c-4114-8c1d-da80146d80f3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.172241] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] Acquiring lock "93b78a8b-389c-4114-8c1d-da80146d80f3-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.172562] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] Lock "93b78a8b-389c-4114-8c1d-da80146d80f3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.172853] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] Lock "93b78a8b-389c-4114-8c1d-da80146d80f3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.175808] env[69992]: INFO nova.compute.manager [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Terminating instance [ 870.245949] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896631, 'name': ReconfigVM_Task, 'duration_secs': 0.400769} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.246247] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 1d436762-964d-40d9-871e-ee33c3ba25b5/1d436762-964d-40d9-871e-ee33c3ba25b5.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 870.246934] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ba801d7e-3597-48e2-8936-a695779bb065 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.257414] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 870.257414] env[69992]: value = "task-2896633" [ 870.257414] env[69992]: _type = "Task" [ 870.257414] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.267191] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896633, 'name': Rename_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.340480] env[69992]: DEBUG nova.compute.manager [req-7ed2a642-5f3a-4c40-b805-58fadca7a259 req-c957db4c-19f6-4d5b-92e8-5905c69a3b87 service nova] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Received event network-changed-bb164768-c900-42bd-819e-eb523bfc2d54 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 870.340681] env[69992]: DEBUG nova.compute.manager [req-7ed2a642-5f3a-4c40-b805-58fadca7a259 req-c957db4c-19f6-4d5b-92e8-5905c69a3b87 service nova] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Refreshing instance network info cache due to event network-changed-bb164768-c900-42bd-819e-eb523bfc2d54. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 870.342342] env[69992]: DEBUG oslo_concurrency.lockutils [req-7ed2a642-5f3a-4c40-b805-58fadca7a259 req-c957db4c-19f6-4d5b-92e8-5905c69a3b87 service nova] Acquiring lock "refresh_cache-64ab568c-a2ef-4bac-8885-3dde76f9f764" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.342342] env[69992]: DEBUG oslo_concurrency.lockutils [req-7ed2a642-5f3a-4c40-b805-58fadca7a259 req-c957db4c-19f6-4d5b-92e8-5905c69a3b87 service nova] Acquired lock "refresh_cache-64ab568c-a2ef-4bac-8885-3dde76f9f764" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 870.342342] env[69992]: DEBUG nova.network.neutron [req-7ed2a642-5f3a-4c40-b805-58fadca7a259 req-c957db4c-19f6-4d5b-92e8-5905c69a3b87 service nova] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Refreshing network info cache for port bb164768-c900-42bd-819e-eb523bfc2d54 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 870.377853] env[69992]: DEBUG nova.compute.manager [None req-7633eb50-3bfd-4372-9cf6-3896392e9163 tempest-ServerDiagnosticsTest-2036708373 tempest-ServerDiagnosticsTest-2036708373-project-admin] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 870.381023] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32cce53-2fef-4641-a4ab-1f936f33ee42 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.387077] env[69992]: INFO nova.compute.manager [None req-7633eb50-3bfd-4372-9cf6-3896392e9163 tempest-ServerDiagnosticsTest-2036708373 tempest-ServerDiagnosticsTest-2036708373-project-admin] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Retrieving diagnostics [ 870.387867] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7708b086-5b43-4658-a5ed-5a9842f4ef5e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.393496] env[69992]: DEBUG nova.compute.manager [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Received event network-changed-2584dc71-913f-4c9b-922c-f8b28530b82f {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 870.393875] env[69992]: DEBUG nova.compute.manager [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] [instance: 
1d436762-964d-40d9-871e-ee33c3ba25b5] Refreshing instance network info cache due to event network-changed-2584dc71-913f-4c9b-922c-f8b28530b82f. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 870.393875] env[69992]: DEBUG oslo_concurrency.lockutils [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] Acquiring lock "refresh_cache-1d436762-964d-40d9-871e-ee33c3ba25b5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.394015] env[69992]: DEBUG oslo_concurrency.lockutils [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] Acquired lock "refresh_cache-1d436762-964d-40d9-871e-ee33c3ba25b5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 870.395021] env[69992]: DEBUG nova.network.neutron [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Refreshing network info cache for port 2584dc71-913f-4c9b-922c-f8b28530b82f {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 870.508812] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896630, 'name': CreateVM_Task, 'duration_secs': 0.679292} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.508989] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 870.509699] env[69992]: DEBUG oslo_concurrency.lockutils [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.509867] env[69992]: DEBUG oslo_concurrency.lockutils [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 870.510210] env[69992]: DEBUG oslo_concurrency.lockutils [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 870.510473] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce1d33f4-5c69-4b93-a888-0a3867af435a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.517391] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Waiting for the task: (returnval){ [ 870.517391] env[69992]: value = 
"session[528eb7b7-6862-86e5-2686-6146916c3c70]526e4902-e0f6-cb88-0397-6f33ef751548" [ 870.517391] env[69992]: _type = "Task" [ 870.517391] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.526883] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526e4902-e0f6-cb88-0397-6f33ef751548, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.552623] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2896632, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089266} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.552783] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 870.553573] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415c5d4f-606d-419a-a497-5662809e759c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.569517] env[69992]: DEBUG nova.scheduler.client.report [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 870.581267] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] e5d9de80-1ee5-462a-8459-168fd60e1972/e5d9de80-1ee5-462a-8459-168fd60e1972.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 870.583389] env[69992]: DEBUG nova.compute.manager [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 870.584939] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39a1ccc7-c2b8-4a28-b010-53e36f4dcde1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.604818] env[69992]: DEBUG nova.compute.manager [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 870.617321] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 870.617321] env[69992]: value = "task-2896634" [ 870.617321] env[69992]: _type = "Task" [ 870.617321] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.629576] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2896634, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.631844] env[69992]: DEBUG nova.virt.hardware [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 870.632089] env[69992]: DEBUG nova.virt.hardware [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 870.632248] env[69992]: DEBUG nova.virt.hardware [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 870.632431] env[69992]: DEBUG nova.virt.hardware [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 870.632576] 
env[69992]: DEBUG nova.virt.hardware [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 870.632873] env[69992]: DEBUG nova.virt.hardware [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 870.633123] env[69992]: DEBUG nova.virt.hardware [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 870.633348] env[69992]: DEBUG nova.virt.hardware [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 870.633561] env[69992]: DEBUG nova.virt.hardware [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 870.633736] env[69992]: DEBUG nova.virt.hardware [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 870.633912] env[69992]: DEBUG nova.virt.hardware [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 870.634765] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041f4d69-8a32-4d2c-b551-8a4584cc88c0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.643614] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128d4693-cd61-4a43-b3fb-28f1d709af2a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.649359] env[69992]: DEBUG nova.network.neutron [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Updated VIF entry in instance network info cache for port 1f86db68-8a81-421c-aa9b-4daab0584c4c. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 870.649693] env[69992]: DEBUG nova.network.neutron [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Updating instance_info_cache with network_info: [{"id": "1f86db68-8a81-421c-aa9b-4daab0584c4c", "address": "fa:16:3e:1d:47:c9", "network": {"id": "3e77044c-b2d9-4469-8bae-4dbd1f752c9c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-482235377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da546e986828460e958e2eed165bf47e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f86db68-8a", "ovs_interfaceid": "1f86db68-8a81-421c-aa9b-4daab0584c4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.674655] env[69992]: INFO nova.compute.manager [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Took 20.11 seconds to build instance. [ 870.682175] env[69992]: DEBUG nova.compute.manager [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 870.682175] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 870.682175] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2857c5-0f56-4ab1-a1d0-acc1f059403b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.692176] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 870.692456] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fea32997-9ab2-4819-b2eb-230dc7a115bf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.701540] env[69992]: DEBUG oslo_vmware.api [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] Waiting for the task: (returnval){ [ 870.701540] env[69992]: value = "task-2896635" [ 870.701540] env[69992]: _type = "Task" [ 870.701540] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.711425] env[69992]: DEBUG oslo_vmware.api [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] Task: {'id': task-2896635, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.770772] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896633, 'name': Rename_Task, 'duration_secs': 0.482886} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.771102] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 870.771360] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d3021bd9-edb1-4379-877b-56c430430026 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.779871] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 870.779871] env[69992]: value = "task-2896636" [ 870.779871] env[69992]: _type = "Task" [ 870.779871] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.791688] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896636, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.036416] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526e4902-e0f6-cb88-0397-6f33ef751548, 'name': SearchDatastore_Task, 'duration_secs': 0.033781} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.036740] env[69992]: DEBUG oslo_concurrency.lockutils [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 871.036987] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 871.037662] env[69992]: DEBUG oslo_concurrency.lockutils [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.037821] env[69992]: DEBUG oslo_concurrency.lockutils [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.038030] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 871.038282] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e578e84e-12e0-4ed2-9832-9fbb19087f62 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.049524] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 871.049662] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 871.050546] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5afb209d-7773-4765-892d-9cd4fb723f90 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.057827] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Waiting for the task: (returnval){ [ 871.057827] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a805fd-76d1-4cfa-26f1-5100b9f55b8c" [ 871.057827] env[69992]: _type = "Task" [ 871.057827] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.074810] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a805fd-76d1-4cfa-26f1-5100b9f55b8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.084148] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.545s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.084645] env[69992]: DEBUG nova.compute.manager [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 871.089687] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.755s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 871.092743] env[69992]: INFO nova.compute.claims [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 871.131518] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2896634, 'name': ReconfigVM_Task, 'duration_secs': 0.388697} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.132095] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Reconfigured VM instance instance-0000000a to attach disk [datastore1] e5d9de80-1ee5-462a-8459-168fd60e1972/e5d9de80-1ee5-462a-8459-168fd60e1972.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 871.132440] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cf28a81c-4d58-48bf-929e-dee1bb8ddc6a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.144125] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 871.144125] env[69992]: value = "task-2896637" [ 871.144125] env[69992]: _type = "Task" [ 871.144125] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.151011] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.155720] env[69992]: DEBUG oslo_concurrency.lockutils [req-a7a8fbbc-013a-41bb-ab01-357990649046 req-d2863f7f-87b9-4696-b8f7-3330d8e00c1d service nova] Releasing lock "refresh_cache-ee4c0f2b-44cb-4b37-8e4a-5706b9932144" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 871.159592] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2896637, 'name': Rename_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.178104] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d627cb8a-0a8c-4c04-8a64-97b05757a2a8 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "ee4c0f2b-44cb-4b37-8e4a-5706b9932144" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.634s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.214909] env[69992]: DEBUG oslo_vmware.api [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] Task: {'id': task-2896635, 'name': PowerOffVM_Task, 'duration_secs': 0.249287} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.215422] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 871.215825] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 871.219022] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-49402569-63f2-4a9e-a84a-553dc80d8a4c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.230728] env[69992]: DEBUG nova.network.neutron [req-7ed2a642-5f3a-4c40-b805-58fadca7a259 req-c957db4c-19f6-4d5b-92e8-5905c69a3b87 service nova] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Updated VIF entry in instance network info cache for port bb164768-c900-42bd-819e-eb523bfc2d54. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 871.231151] env[69992]: DEBUG nova.network.neutron [req-7ed2a642-5f3a-4c40-b805-58fadca7a259 req-c957db4c-19f6-4d5b-92e8-5905c69a3b87 service nova] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Updating instance_info_cache with network_info: [{"id": "bb164768-c900-42bd-819e-eb523bfc2d54", "address": "fa:16:3e:8d:66:f4", "network": {"id": "023d028d-4d61-4461-91bb-ebf0da99219b", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-413392921-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23727c76d64d4449820b8f861230275e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62d6a386-ffdb-4232-83f3-cb21c5e59e85", "external-id": "nsx-vlan-transportzone-950", "segmentation_id": 950, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb164768-c9", "ovs_interfaceid": "bb164768-c900-42bd-819e-eb523bfc2d54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.290533] env[69992]: DEBUG nova.network.neutron [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Updated VIF entry in instance network info cache for port 2584dc71-913f-4c9b-922c-f8b28530b82f. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 871.290760] env[69992]: DEBUG nova.network.neutron [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Updating instance_info_cache with network_info: [{"id": "2584dc71-913f-4c9b-922c-f8b28530b82f", "address": "fa:16:3e:42:68:21", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.191", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2584dc71-91", "ovs_interfaceid": "2584dc71-913f-4c9b-922c-f8b28530b82f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.294153] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 871.294372] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 871.294569] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] Deleting the datastore file [datastore1] 93b78a8b-389c-4114-8c1d-da80146d80f3 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 871.295125] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b662647f-775c-4ec5-ad99-230d572c1806 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.301689] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896636, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.309510] env[69992]: DEBUG oslo_vmware.api [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] Waiting for the task: (returnval){ [ 871.309510] env[69992]: value = "task-2896639" [ 871.309510] env[69992]: _type = "Task" [ 871.309510] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.321483] env[69992]: DEBUG oslo_vmware.api [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] Task: {'id': task-2896639, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.376127] env[69992]: DEBUG nova.compute.manager [None req-7489e921-a067-4b5d-b29a-d8514585e6eb tempest-ServerDiagnosticsV248Test-71468773 tempest-ServerDiagnosticsV248Test-71468773-project-admin] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 871.377606] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef433e5-0f2a-4665-8c4e-9eec2674f13a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.387444] env[69992]: INFO nova.compute.manager [None req-7489e921-a067-4b5d-b29a-d8514585e6eb tempest-ServerDiagnosticsV248Test-71468773 tempest-ServerDiagnosticsV248Test-71468773-project-admin] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Retrieving diagnostics [ 871.388307] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a6d3109-9c3a-4273-a2d9-99fd894395b6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.576167] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a805fd-76d1-4cfa-26f1-5100b9f55b8c, 'name': SearchDatastore_Task, 'duration_secs': 0.013921} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.577110] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a2d79b2-0dd1-41bc-937b-f5206d5a706e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.584603] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Waiting for the task: (returnval){ [ 871.584603] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52fe34e3-7129-7eac-664d-98dc1746d79b" [ 871.584603] env[69992]: _type = "Task" [ 871.584603] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.595909] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52fe34e3-7129-7eac-664d-98dc1746d79b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.600432] env[69992]: DEBUG nova.compute.utils [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 871.602112] env[69992]: DEBUG nova.compute.manager [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 871.602178] env[69992]: DEBUG nova.network.neutron [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 871.658957] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2896637, 'name': Rename_Task, 'duration_secs': 0.193954} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.661123] env[69992]: DEBUG nova.policy [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3f97c3c21b7d45308c3c7c1094bf4039', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '96a05199dc1445dcb7b42b7feb26ed2d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 871.666293] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 871.666293] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d6feb1a7-7ef9-45cf-9eac-7f696bd86c5a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.675790] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 871.675790] env[69992]: value = "task-2896640" [ 871.675790] env[69992]: _type = "Task" [ 871.675790] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.680308] env[69992]: DEBUG nova.compute.manager [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 871.688660] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2896640, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.734278] env[69992]: DEBUG oslo_concurrency.lockutils [req-7ed2a642-5f3a-4c40-b805-58fadca7a259 req-c957db4c-19f6-4d5b-92e8-5905c69a3b87 service nova] Releasing lock "refresh_cache-64ab568c-a2ef-4bac-8885-3dde76f9f764" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 871.793838] env[69992]: DEBUG oslo_vmware.api [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896636, 'name': PowerOnVM_Task, 'duration_secs': 0.703742} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.798042] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 871.798042] env[69992]: INFO nova.compute.manager [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Took 9.68 seconds to spawn the instance on the hypervisor. [ 871.798042] env[69992]: DEBUG nova.compute.manager [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 871.798042] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b51c364d-5b93-42db-a3a5-5fafaa3f9f8b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.799491] env[69992]: DEBUG oslo_concurrency.lockutils [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] Releasing lock "refresh_cache-1d436762-964d-40d9-871e-ee33c3ba25b5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 871.800087] env[69992]: DEBUG nova.compute.manager [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Received event network-vif-deleted-0adb34fa-f52f-4b4d-983b-afa1a04f5624 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 871.800438] env[69992]: DEBUG nova.compute.manager [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Received event network-vif-plugged-e64de32e-0e37-4777-91e7-8be0da0fa147 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 871.800910] env[69992]: DEBUG oslo_concurrency.lockutils [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] Acquiring lock "e5d9de80-1ee5-462a-8459-168fd60e1972-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.801321] env[69992]: DEBUG oslo_concurrency.lockutils [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] Lock "e5d9de80-1ee5-462a-8459-168fd60e1972-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 871.802041] env[69992]: DEBUG oslo_concurrency.lockutils [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] Lock "e5d9de80-1ee5-462a-8459-168fd60e1972-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69992) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.802358] env[69992]: DEBUG nova.compute.manager [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] No waiting events found dispatching network-vif-plugged-e64de32e-0e37-4777-91e7-8be0da0fa147 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 871.802805] env[69992]: WARNING nova.compute.manager [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Received unexpected event network-vif-plugged-e64de32e-0e37-4777-91e7-8be0da0fa147 for instance with vm_state building and task_state spawning. [ 871.803124] env[69992]: DEBUG nova.compute.manager [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Received event network-changed-e64de32e-0e37-4777-91e7-8be0da0fa147 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 871.803412] env[69992]: DEBUG nova.compute.manager [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Refreshing instance network info cache due to event network-changed-e64de32e-0e37-4777-91e7-8be0da0fa147. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 871.806506] env[69992]: DEBUG oslo_concurrency.lockutils [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] Acquiring lock "refresh_cache-e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.806506] env[69992]: DEBUG oslo_concurrency.lockutils [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] Acquired lock "refresh_cache-e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.806506] env[69992]: DEBUG nova.network.neutron [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Refreshing network info cache for port e64de32e-0e37-4777-91e7-8be0da0fa147 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 871.827826] env[69992]: DEBUG oslo_vmware.api [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] Task: {'id': task-2896639, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.231672} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.828548] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 871.829080] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 871.829408] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 871.829723] env[69992]: INFO nova.compute.manager [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Took 1.15 seconds to destroy the instance on the hypervisor. [ 871.830574] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 871.830574] env[69992]: DEBUG nova.compute.manager [-] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 871.832959] env[69992]: DEBUG nova.network.neutron [-] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 872.078576] env[69992]: DEBUG nova.network.neutron [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Successfully updated port: ac8aea81-104c-4dc6-a761-379a3d5a7b2d {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 872.106651] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52fe34e3-7129-7eac-664d-98dc1746d79b, 'name': SearchDatastore_Task, 'duration_secs': 0.019155} completed successfully.
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.108759] env[69992]: DEBUG nova.compute.manager [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 872.111069] env[69992]: DEBUG oslo_concurrency.lockutils [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 872.111069] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 1d5722e1-5a48-4212-bbc7-527a3739db6e/1d5722e1-5a48-4212-bbc7-527a3739db6e.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 872.112485] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8710a98d-3192-46d7-b831-114036ae6167 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.128992] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Waiting for the task: (returnval){ [ 872.128992] env[69992]: value = "task-2896641" [ 872.128992] env[69992]: _type = "Task" [ 872.128992] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.140888] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896641, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.153545] env[69992]: DEBUG nova.network.neutron [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Successfully created port: 54980674-0d82-4eac-8cb8-3d49bf81e6f0 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 872.193207] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2896640, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.208699] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 872.334094] env[69992]: INFO nova.compute.manager [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Took 18.80 seconds to build instance. [ 872.473889] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d818cf80-e65b-47fd-9e86-415c40a46bc8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.483075] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eba3dac-e663-491c-a611-4646506741af {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.526969] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-820eacd2-f33f-4001-af5e-d0aabc2cddb9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.535722] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-820716d1-8d16-442c-8448-b34e1373eae0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.550612] env[69992]: DEBUG nova.compute.provider_tree [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 872.592310] env[69992]: DEBUG oslo_concurrency.lockutils [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Acquiring lock "refresh_cache-6c58c05e-9679-4e53-89e7-c7c9cb11cff0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.592310] env[69992]: DEBUG oslo_concurrency.lockutils [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Acquired lock "refresh_cache-6c58c05e-9679-4e53-89e7-c7c9cb11cff0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 872.592310] env[69992]: DEBUG nova.network.neutron [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 872.614954] env[69992]: DEBUG nova.network.neutron [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service 
nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Updated VIF entry in instance network info cache for port e64de32e-0e37-4777-91e7-8be0da0fa147. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 872.615386] env[69992]: DEBUG nova.network.neutron [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Updating instance_info_cache with network_info: [{"id": "e64de32e-0e37-4777-91e7-8be0da0fa147", "address": "fa:16:3e:c2:9f:3c", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape64de32e-0e", "ovs_interfaceid": "e64de32e-0e37-4777-91e7-8be0da0fa147", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.640223] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896641, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.689128] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2896640, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.743060] env[69992]: DEBUG nova.network.neutron [-] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.836167] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdaf08b-c3bd-430d-ae4c-ff5fa5bbe064 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "1d436762-964d-40d9-871e-ee33c3ba25b5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 20.320s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.053839] env[69992]: DEBUG nova.scheduler.client.report [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 873.120405] env[69992]: DEBUG oslo_concurrency.lockutils [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] Releasing lock "refresh_cache-e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.120660] env[69992]: DEBUG nova.compute.manager [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Received event network-vif-plugged-58835cdc-7ea4-4647-9a86-35f7cb486922 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 873.120851] env[69992]: DEBUG oslo_concurrency.lockutils [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] Acquiring lock "1d5722e1-5a48-4212-bbc7-527a3739db6e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.121067] env[69992]: DEBUG oslo_concurrency.lockutils [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] Lock "1d5722e1-5a48-4212-bbc7-527a3739db6e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.121251] env[69992]: DEBUG oslo_concurrency.lockutils [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] Lock "1d5722e1-5a48-4212-bbc7-527a3739db6e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.121387] env[69992]: DEBUG nova.compute.manager
[req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] No waiting events found dispatching network-vif-plugged-58835cdc-7ea4-4647-9a86-35f7cb486922 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 873.121561] env[69992]: WARNING nova.compute.manager [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Received unexpected event network-vif-plugged-58835cdc-7ea4-4647-9a86-35f7cb486922 for instance with vm_state building and task_state spawning. [ 873.121719] env[69992]: DEBUG nova.compute.manager [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Received event network-changed-58835cdc-7ea4-4647-9a86-35f7cb486922 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 873.121868] env[69992]: DEBUG nova.compute.manager [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Refreshing instance network info cache due to event network-changed-58835cdc-7ea4-4647-9a86-35f7cb486922. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 873.122061] env[69992]: DEBUG oslo_concurrency.lockutils [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] Acquiring lock "refresh_cache-1d5722e1-5a48-4212-bbc7-527a3739db6e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.122725] env[69992]: DEBUG oslo_concurrency.lockutils [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] Acquired lock "refresh_cache-1d5722e1-5a48-4212-bbc7-527a3739db6e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 873.122725] env[69992]: DEBUG nova.network.neutron [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Refreshing network info cache for port 58835cdc-7ea4-4647-9a86-35f7cb486922 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 873.125090] env[69992]: DEBUG nova.compute.manager [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 873.137435] env[69992]: DEBUG nova.network.neutron [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 873.144541] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896641, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.888755} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.144541] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 1d5722e1-5a48-4212-bbc7-527a3739db6e/1d5722e1-5a48-4212-bbc7-527a3739db6e.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 873.144641] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 873.145163] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7a884245-168d-4428-bd3e-2cef261ae8eb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.155215] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Waiting for the task: (returnval){ [ 873.155215] env[69992]: value = "task-2896642" [ 873.155215] env[69992]: _type = "Task" [ 873.155215] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.158716] env[69992]: DEBUG nova.virt.hardware [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 873.158924] env[69992]: DEBUG nova.virt.hardware [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 873.159803] env[69992]: DEBUG nova.virt.hardware [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 873.160180] env[69992]: DEBUG nova.virt.hardware [None 
req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 873.160352] env[69992]: DEBUG nova.virt.hardware [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 873.160497] env[69992]: DEBUG nova.virt.hardware [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 873.160749] env[69992]: DEBUG nova.virt.hardware [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 873.160928] env[69992]: DEBUG nova.virt.hardware [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 873.161130] env[69992]: DEBUG nova.virt.hardware [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 873.161296] env[69992]: DEBUG nova.virt.hardware [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 873.161489] env[69992]: DEBUG nova.virt.hardware [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 873.162476] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df5a64e-d138-4a71-b4bd-b9d8d94a3abf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.181773] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896642, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.185795] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac755c2b-a560-47d3-9851-e58debbee995 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.206796] env[69992]: DEBUG oslo_vmware.api [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2896640, 'name': PowerOnVM_Task, 'duration_secs': 1.035894} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.207463] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 873.207669] env[69992]: INFO nova.compute.manager [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Took 8.73 seconds to spawn the instance on the hypervisor. [ 873.207856] env[69992]: DEBUG nova.compute.manager [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 873.209055] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba85033-7a9d-4f4e-b21b-4bced0772069 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.245920] env[69992]: INFO nova.compute.manager [-] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Took 1.42 seconds to deallocate network for instance. 
[ 873.320762] env[69992]: DEBUG nova.network.neutron [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Updating instance_info_cache with network_info: [{"id": "ac8aea81-104c-4dc6-a761-379a3d5a7b2d", "address": "fa:16:3e:81:67:75", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.168", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac8aea81-10", "ovs_interfaceid": "ac8aea81-104c-4dc6-a761-379a3d5a7b2d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.343891] env[69992]: DEBUG nova.compute.manager [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 873.346588] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Acquiring lock "e934fc79-f7c5-4ca9-9f81-85467c1e9b45" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.346910] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Lock "e934fc79-f7c5-4ca9-9f81-85467c1e9b45" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.347086] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Acquiring lock "e934fc79-f7c5-4ca9-9f81-85467c1e9b45-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.347274] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Lock "e934fc79-f7c5-4ca9-9f81-85467c1e9b45-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.347444] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Lock "e934fc79-f7c5-4ca9-9f81-85467c1e9b45-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.349337] env[69992]: INFO nova.compute.manager [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Terminating instance [ 873.563105] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.473s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.563360] env[69992]: DEBUG nova.compute.manager [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Start building networks asynchronously for instance.
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 873.565971] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.197s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.566212] env[69992]: DEBUG nova.objects.instance [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Lazy-loading 'resources' on Instance uuid e74441fc-361f-4e0b-bfdd-6f8213db51e3 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 873.677327] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896642, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.175187} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.677327] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 873.679951] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e42f703f-f9dd-4387-aacb-e8d0b9b518aa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.707282] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] 1d5722e1-5a48-4212-bbc7-527a3739db6e/1d5722e1-5a48-4212-bbc7-527a3739db6e.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 873.707580] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e841c4c6-39a5-49a0-9cc0-01b38fa11e11 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.736313] env[69992]: INFO nova.compute.manager [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Took 19.16 seconds to build instance. [ 873.742983] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Waiting for the task: (returnval){ [ 873.742983] env[69992]: value = "task-2896643" [ 873.742983] env[69992]: _type = "Task" [ 873.742983] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.754641] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.754965] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896643, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.822752] env[69992]: DEBUG oslo_concurrency.lockutils [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Releasing lock "refresh_cache-6c58c05e-9679-4e53-89e7-c7c9cb11cff0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.823106] env[69992]: DEBUG nova.compute.manager [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Instance network_info: |[{"id": "ac8aea81-104c-4dc6-a761-379a3d5a7b2d", "address": "fa:16:3e:81:67:75", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.168", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac8aea81-10", "ovs_interfaceid": "ac8aea81-104c-4dc6-a761-379a3d5a7b2d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 873.823556] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:67:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '07e9bef1-2b0e-4e4d-997f-de71bb0e213a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ac8aea81-104c-4dc6-a761-379a3d5a7b2d', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 873.833478] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 
tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Creating folder: Project (662852335b0d4a50ac0e0afb0a9f58dc). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 873.834751] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-da9a84e1-8193-4814-8200-195e793b5f93 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.848034] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Created folder: Project (662852335b0d4a50ac0e0afb0a9f58dc) in parent group-v581821. [ 873.848349] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Creating folder: Instances. Parent ref: group-v581855. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 873.848927] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ec4021e-2091-46d8-9eb1-4d04d11bd50b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.854501] env[69992]: DEBUG nova.compute.manager [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 873.854815] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 873.857733] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96fa63ea-a1a6-46ca-80d6-fc51ec9b6d97 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.863038] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Created folder: Instances in parent group-v581855. [ 873.863176] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 873.863658] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 873.863864] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7ff3cae-eff2-443e-a305-e015ba293318 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.883014] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 873.884147] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.885177] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-403f5d15-eaaa-47eb-bea3-544b299a5073 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.890233] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 873.890233] env[69992]: value = "task-2896646" [ 873.890233] env[69992]: _type = "Task" [ 873.890233] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.900196] env[69992]: DEBUG oslo_vmware.api [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Waiting for the task: (returnval){ [ 873.900196] env[69992]: value = "task-2896647" [ 873.900196] env[69992]: _type = "Task" [ 873.900196] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.905324] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896646, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.912346] env[69992]: DEBUG oslo_vmware.api [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Task: {'id': task-2896647, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.970077] env[69992]: DEBUG nova.network.neutron [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Updated VIF entry in instance network info cache for port 58835cdc-7ea4-4647-9a86-35f7cb486922. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 873.971314] env[69992]: DEBUG nova.network.neutron [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Updating instance_info_cache with network_info: [{"id": "58835cdc-7ea4-4647-9a86-35f7cb486922", "address": "fa:16:3e:27:ed:df", "network": {"id": "47290065-7498-4833-a617-c1038575b524", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1460659213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60fa5c5488fd4ac38c8c8556109c7413", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58835cdc-7e", "ovs_interfaceid": "58835cdc-7ea4-4647-9a86-35f7cb486922", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.024141] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Acquiring lock "f249c0b9-ddd7-4b63-ae3a-11035764d3e5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.024599] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Lock "f249c0b9-ddd7-4b63-ae3a-11035764d3e5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.024785] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Acquiring lock "f249c0b9-ddd7-4b63-ae3a-11035764d3e5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.025398] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Lock "f249c0b9-ddd7-4b63-ae3a-11035764d3e5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.025398] env[69992]: DEBUG oslo_concurrency.lockutils [None 
req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Lock "f249c0b9-ddd7-4b63-ae3a-11035764d3e5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.028699] env[69992]: INFO nova.compute.manager [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Terminating instance [ 874.039703] env[69992]: DEBUG nova.compute.manager [req-978aa9d8-6503-4402-9e8f-501b267554e6 req-e750263f-b016-42f7-9305-b2e58b32890c service nova] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Received event network-vif-plugged-ac8aea81-104c-4dc6-a761-379a3d5a7b2d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 874.039922] env[69992]: DEBUG oslo_concurrency.lockutils [req-978aa9d8-6503-4402-9e8f-501b267554e6 req-e750263f-b016-42f7-9305-b2e58b32890c service nova] Acquiring lock "6c58c05e-9679-4e53-89e7-c7c9cb11cff0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.040719] env[69992]: DEBUG oslo_concurrency.lockutils [req-978aa9d8-6503-4402-9e8f-501b267554e6 req-e750263f-b016-42f7-9305-b2e58b32890c service nova] Lock "6c58c05e-9679-4e53-89e7-c7c9cb11cff0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.040844] env[69992]: DEBUG oslo_concurrency.lockutils [req-978aa9d8-6503-4402-9e8f-501b267554e6 req-e750263f-b016-42f7-9305-b2e58b32890c service nova] Lock "6c58c05e-9679-4e53-89e7-c7c9cb11cff0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.040990] env[69992]: DEBUG nova.compute.manager [req-978aa9d8-6503-4402-9e8f-501b267554e6 req-e750263f-b016-42f7-9305-b2e58b32890c service nova] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] No waiting events found dispatching network-vif-plugged-ac8aea81-104c-4dc6-a761-379a3d5a7b2d {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 874.041601] env[69992]: WARNING nova.compute.manager [req-978aa9d8-6503-4402-9e8f-501b267554e6 req-e750263f-b016-42f7-9305-b2e58b32890c service nova] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Received unexpected event network-vif-plugged-ac8aea81-104c-4dc6-a761-379a3d5a7b2d for instance with vm_state building and task_state spawning. 
[ 874.041809] env[69992]: DEBUG nova.compute.manager [req-978aa9d8-6503-4402-9e8f-501b267554e6 req-e750263f-b016-42f7-9305-b2e58b32890c service nova] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Received event network-changed-ac8aea81-104c-4dc6-a761-379a3d5a7b2d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 874.041995] env[69992]: DEBUG nova.compute.manager [req-978aa9d8-6503-4402-9e8f-501b267554e6 req-e750263f-b016-42f7-9305-b2e58b32890c service nova] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Refreshing instance network info cache due to event network-changed-ac8aea81-104c-4dc6-a761-379a3d5a7b2d. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 874.042284] env[69992]: DEBUG oslo_concurrency.lockutils [req-978aa9d8-6503-4402-9e8f-501b267554e6 req-e750263f-b016-42f7-9305-b2e58b32890c service nova] Acquiring lock "refresh_cache-6c58c05e-9679-4e53-89e7-c7c9cb11cff0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.042351] env[69992]: DEBUG oslo_concurrency.lockutils [req-978aa9d8-6503-4402-9e8f-501b267554e6 req-e750263f-b016-42f7-9305-b2e58b32890c service nova] Acquired lock "refresh_cache-6c58c05e-9679-4e53-89e7-c7c9cb11cff0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.043834] env[69992]: DEBUG nova.network.neutron [req-978aa9d8-6503-4402-9e8f-501b267554e6 req-e750263f-b016-42f7-9305-b2e58b32890c service nova] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Refreshing network info cache for port ac8aea81-104c-4dc6-a761-379a3d5a7b2d {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 874.051800] env[69992]: DEBUG nova.network.neutron [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Successfully updated port: 54980674-0d82-4eac-8cb8-3d49bf81e6f0 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 874.071040] env[69992]: DEBUG nova.compute.utils [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 874.079210] env[69992]: DEBUG nova.compute.manager [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 874.079567] env[69992]: DEBUG nova.network.neutron [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 874.085117] env[69992]: DEBUG nova.compute.manager [req-b78cde27-e594-46be-81ec-07caca080440 req-23695d0c-0b27-4e9d-88d6-0fa2ac658f6b service nova] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Received event network-vif-deleted-ebe4280c-0d36-4d08-8c4b-cba51c7f80e9 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 874.103792] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquiring lock "73e41918-88b8-4ff7-9fdd-b45ac97c80ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.104123] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lock "73e41918-88b8-4ff7-9fdd-b45ac97c80ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.211398] env[69992]: DEBUG nova.policy [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f616200813c34b55bf3a07357c84b61e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '44ba33c0faa743dbba7666a8b8eec8df', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 874.241786] env[69992]: DEBUG oslo_concurrency.lockutils [None req-073a033e-9e47-4182-96b2-ac75897b679d tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "e5d9de80-1ee5-462a-8459-168fd60e1972" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.679s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.258513] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896643, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.411666] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896646, 'name': CreateVM_Task, 'duration_secs': 0.460994} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.411987] env[69992]: DEBUG oslo_vmware.api [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Task: {'id': task-2896647, 'name': PowerOffVM_Task, 'duration_secs': 0.270587} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.412121] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 874.412365] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 874.412572] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 874.415384] env[69992]: DEBUG oslo_concurrency.lockutils [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.415384] env[69992]: DEBUG oslo_concurrency.lockutils [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.415384] env[69992]: DEBUG oslo_concurrency.lockutils [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 874.415384] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00a0b3e9-09c7-4fa0-bb27-97915ef80760 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.418735] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ce0f06-a7c5-43ca-853b-fc797490dc70 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.420042] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e3d0684-aeea-4f8c-9a26-b471defdaf0b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.425187] env[69992]: DEBUG oslo_vmware.api [None 
req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Waiting for the task: (returnval){ [ 874.425187] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]523d6cc2-02bb-f8aa-a131-364371e72049" [ 874.425187] env[69992]: _type = "Task" [ 874.425187] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.430896] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc40a298-d200-4d98-bd10-1c32f1479c87 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.439780] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523d6cc2-02bb-f8aa-a131-364371e72049, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.474039] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f97dd2-af8b-4c2d-81ae-f73ec164aca7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.478145] env[69992]: DEBUG oslo_concurrency.lockutils [req-03058006-a6c2-4cbc-8af8-f78117d996da req-98bce428-5903-45ed-a372-9b546fd58516 service nova] Releasing lock "refresh_cache-1d5722e1-5a48-4212-bbc7-527a3739db6e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 874.484573] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd0f510-00df-4d21-bd3b-959ebce11f5c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.489865] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 874.490169] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 874.490319] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Deleting the datastore file [datastore1] e934fc79-f7c5-4ca9-9f81-85467c1e9b45 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 874.490969] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-deaae497-4d17-4b5a-b49d-f08d030221bd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.502277] env[69992]: DEBUG nova.compute.provider_tree [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 
tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.505517] env[69992]: DEBUG oslo_vmware.api [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Waiting for the task: (returnval){ [ 874.505517] env[69992]: value = "task-2896649" [ 874.505517] env[69992]: _type = "Task" [ 874.505517] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.513945] env[69992]: DEBUG oslo_vmware.api [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Task: {'id': task-2896649, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.536085] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Acquiring lock "refresh_cache-f249c0b9-ddd7-4b63-ae3a-11035764d3e5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.536271] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Acquired lock "refresh_cache-f249c0b9-ddd7-4b63-ae3a-11035764d3e5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.536487] env[69992]: DEBUG nova.network.neutron [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 874.556693] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Acquiring lock "refresh_cache-eba81db1-973c-4981-baca-cb98e4087510" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.556693] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Acquired lock "refresh_cache-eba81db1-973c-4981-baca-cb98e4087510" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.556693] env[69992]: DEBUG nova.network.neutron [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 874.577915] env[69992]: DEBUG nova.compute.manager [None req-9edc14fa-b040-486c-8e00-050e196c6322 
tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 874.753829] env[69992]: DEBUG nova.compute.manager [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 874.765634] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896643, 'name': ReconfigVM_Task, 'duration_secs': 0.618897} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.766634] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Reconfigured VM instance instance-0000000b to attach disk [datastore2] 1d5722e1-5a48-4212-bbc7-527a3739db6e/1d5722e1-5a48-4212-bbc7-527a3739db6e.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 874.766754] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aec5e0e9-6e6a-40b5-a08c-efbd60c6228e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.777064] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Waiting for the task: (returnval){ [ 874.777064] env[69992]: value = "task-2896650" [ 874.777064] env[69992]: _type = "Task" [ 874.777064] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.790559] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896650, 'name': Rename_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.874229] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "00b2fd0b-7841-448d-82cf-436aa8d80cda" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.874229] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "00b2fd0b-7841-448d-82cf-436aa8d80cda" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.940367] env[69992]: DEBUG nova.network.neutron [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Successfully created port: 3ab410e0-7643-4dc4-b15e-ca8b2701aefa {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 874.954638] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523d6cc2-02bb-f8aa-a131-364371e72049, 'name': SearchDatastore_Task, 'duration_secs': 0.031692} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.954956] env[69992]: DEBUG oslo_concurrency.lockutils [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 874.955216] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 874.956092] env[69992]: DEBUG oslo_concurrency.lockutils [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.956092] env[69992]: DEBUG oslo_concurrency.lockutils [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.956092] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 874.957652] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a6a1966-57d4-4d70-9bd5-f9d65295aa1d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.973724] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 874.973724] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 874.974133] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fd8c716-25ae-47dd-8de3-7f47a702d20b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.985131] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Waiting for the task: (returnval){ [ 874.985131] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52957f32-b830-56f3-5469-a4359bcf7250" [ 874.985131] env[69992]: _type = "Task" [ 874.985131] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.994823] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52957f32-b830-56f3-5469-a4359bcf7250, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.997791] env[69992]: DEBUG nova.network.neutron [req-978aa9d8-6503-4402-9e8f-501b267554e6 req-e750263f-b016-42f7-9305-b2e58b32890c service nova] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Updated VIF entry in instance network info cache for port ac8aea81-104c-4dc6-a761-379a3d5a7b2d. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 874.998139] env[69992]: DEBUG nova.network.neutron [req-978aa9d8-6503-4402-9e8f-501b267554e6 req-e750263f-b016-42f7-9305-b2e58b32890c service nova] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Updating instance_info_cache with network_info: [{"id": "ac8aea81-104c-4dc6-a761-379a3d5a7b2d", "address": "fa:16:3e:81:67:75", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.168", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac8aea81-10", "ovs_interfaceid": "ac8aea81-104c-4dc6-a761-379a3d5a7b2d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.008159] env[69992]: DEBUG nova.scheduler.client.report [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 875.025805] env[69992]: DEBUG oslo_vmware.api [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Task: {'id': task-2896649, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.454161} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.026675] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 875.026675] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 875.026675] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 875.026675] env[69992]: INFO nova.compute.manager [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Took 1.17 seconds to destroy the instance on the hypervisor. [ 875.027019] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 875.027451] env[69992]: DEBUG nova.compute.manager [-] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 875.027546] env[69992]: DEBUG nova.network.neutron [-] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 875.096707] env[69992]: DEBUG nova.network.neutron [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 875.127506] env[69992]: DEBUG nova.network.neutron [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 875.282405] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 875.289695] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896650, 'name': Rename_Task, 'duration_secs': 0.250701} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.289987] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 875.290175] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-198ad3e9-3e7a-431c-89a4-32e8ade9b9b8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.301112] env[69992]: DEBUG nova.network.neutron [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.303084] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Waiting for the task: (returnval){ [ 875.303084] env[69992]: value = "task-2896651" [ 875.303084] env[69992]: _type = "Task" [ 875.303084] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.312373] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896651, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.497525] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52957f32-b830-56f3-5469-a4359bcf7250, 'name': SearchDatastore_Task, 'duration_secs': 0.016769} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.498401] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc18056e-1830-4d21-a6e5-13c15e7c6e3e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.501285] env[69992]: DEBUG oslo_concurrency.lockutils [req-978aa9d8-6503-4402-9e8f-501b267554e6 req-e750263f-b016-42f7-9305-b2e58b32890c service nova] Releasing lock "refresh_cache-6c58c05e-9679-4e53-89e7-c7c9cb11cff0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 875.504478] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Waiting for the task: (returnval){ [ 875.504478] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52bb44cc-9cb9-4210-378a-7eb99930d4d3" [ 875.504478] env[69992]: _type = "Task" [ 875.504478] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.514960] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52bb44cc-9cb9-4210-378a-7eb99930d4d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.518490] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.952s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.524011] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.549s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.525720] env[69992]: INFO nova.compute.claims [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 875.552989] env[69992]: INFO nova.scheduler.client.report [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Deleted allocations for instance e74441fc-361f-4e0b-bfdd-6f8213db51e3 [ 875.595035] env[69992]: DEBUG nova.compute.manager [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 875.632169] env[69992]: DEBUG nova.virt.hardware [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 875.632270] env[69992]: DEBUG nova.virt.hardware [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 875.632447] env[69992]: DEBUG nova.virt.hardware [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 875.632644] env[69992]: DEBUG nova.virt.hardware [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 875.634028] env[69992]: DEBUG nova.virt.hardware [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 875.634028] env[69992]: DEBUG nova.virt.hardware [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 875.634028] env[69992]: DEBUG nova.virt.hardware [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 875.634028] env[69992]: DEBUG nova.virt.hardware [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 875.634028] env[69992]: DEBUG nova.virt.hardware [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 875.634258] env[69992]: DEBUG nova.virt.hardware [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 875.634258] env[69992]: DEBUG nova.virt.hardware [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 875.635970] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-181781fa-3d76-4c42-94f6-2fef396c3ec9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.651595] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df08b9a7-1ce0-40a0-9e39-3abdb70a0080 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.657696] env[69992]: DEBUG nova.network.neutron [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Updating instance_info_cache with network_info: [{"id": "54980674-0d82-4eac-8cb8-3d49bf81e6f0", "address": "fa:16:3e:d4:0a:13", "network": {"id": "654372b0-0166-4b14-b821-c42c0ad0568b", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1742099554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96a05199dc1445dcb7b42b7feb26ed2d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54980674-0d", "ovs_interfaceid": "54980674-0d82-4eac-8cb8-3d49bf81e6f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.804635] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Releasing lock "refresh_cache-f249c0b9-ddd7-4b63-ae3a-11035764d3e5" 
{{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 875.805161] env[69992]: DEBUG nova.compute.manager [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 875.805366] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 875.806388] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c16a81-e242-436f-8dea-1ad459470335 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.818487] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896651, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.820705] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 875.820940] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-177103f1-9a12-41b7-aa11-e32e7c5d026c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.828142] env[69992]: DEBUG oslo_vmware.api [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Waiting for the task: (returnval){ [ 875.828142] env[69992]: value = "task-2896652" [ 875.828142] env[69992]: _type = "Task" [ 875.828142] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.837477] env[69992]: DEBUG oslo_vmware.api [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Task: {'id': task-2896652, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.016456] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52bb44cc-9cb9-4210-378a-7eb99930d4d3, 'name': SearchDatastore_Task, 'duration_secs': 0.013844} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.016727] env[69992]: DEBUG oslo_concurrency.lockutils [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.017034] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 6c58c05e-9679-4e53-89e7-c7c9cb11cff0/6c58c05e-9679-4e53-89e7-c7c9cb11cff0.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 876.017306] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cbd49d8d-9f69-4c5d-bd9d-7f397f08ee7d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.028344] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Waiting for the task: (returnval){ [ 876.028344] env[69992]: value = "task-2896653" [ 876.028344] env[69992]: _type = "Task" [ 876.028344] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.040832] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Task: {'id': task-2896653, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.065729] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f57b0551-d59d-4476-9683-4b791c9c93b8 tempest-InstanceActionsV221TestJSON-1682143905 tempest-InstanceActionsV221TestJSON-1682143905-project-member] Lock "e74441fc-361f-4e0b-bfdd-6f8213db51e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.300s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.114058] env[69992]: DEBUG nova.compute.manager [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Stashing vm_state: active {{(pid=69992) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 876.160685] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Releasing lock "refresh_cache-eba81db1-973c-4981-baca-cb98e4087510" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.161045] env[69992]: DEBUG nova.compute.manager [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Instance network_info: |[{"id": "54980674-0d82-4eac-8cb8-3d49bf81e6f0", "address": "fa:16:3e:d4:0a:13", "network": {"id": "654372b0-0166-4b14-b821-c42c0ad0568b", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1742099554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96a05199dc1445dcb7b42b7feb26ed2d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54980674-0d", "ovs_interfaceid": "54980674-0d82-4eac-8cb8-3d49bf81e6f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 876.161467] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:0a:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbdab640-5fea-4254-8bd3-f855b7eaca0d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '54980674-0d82-4eac-8cb8-3d49bf81e6f0', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 876.170945] 
env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Creating folder: Project (96a05199dc1445dcb7b42b7feb26ed2d). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 876.170945] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e0420c9d-1778-41c8-a67b-658d340f0856 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.187416] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Created folder: Project (96a05199dc1445dcb7b42b7feb26ed2d) in parent group-v581821. [ 876.188024] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Creating folder: Instances. Parent ref: group-v581858. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 876.188024] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b479c4e-cf24-4c11-91d5-4b9796447209 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.203301] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Created folder: Instances in parent group-v581858. [ 876.203301] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 876.203445] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eba81db1-973c-4981-baca-cb98e4087510] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 876.203613] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9293f782-0ba0-45b9-b487-b9e7489cb2f3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.224259] env[69992]: DEBUG nova.network.neutron [-] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.233935] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 876.233935] env[69992]: value = "task-2896656" [ 876.233935] env[69992]: _type = "Task" [ 876.233935] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.246037] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896656, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.317927] env[69992]: DEBUG oslo_vmware.api [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896651, 'name': PowerOnVM_Task, 'duration_secs': 0.796444} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.318308] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 876.318735] env[69992]: INFO nova.compute.manager [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Took 9.42 seconds to spawn the instance on the hypervisor. [ 876.319022] env[69992]: DEBUG nova.compute.manager [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 876.319869] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966e17da-89aa-47c3-b8db-c7c7a69462c3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.340040] env[69992]: DEBUG oslo_vmware.api [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Task: {'id': task-2896652, 'name': PowerOffVM_Task, 'duration_secs': 0.151358} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.340379] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 876.340605] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 876.340897] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b2ca590-db8d-4c01-a994-129f24fff9a6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.370599] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 876.370832] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 876.371383] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Deleting the datastore file [datastore2] f249c0b9-ddd7-4b63-ae3a-11035764d3e5 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 876.372307] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a71f15be-5dd9-44b2-a3db-a8094d30da6c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.382543] env[69992]: DEBUG oslo_vmware.api [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Waiting for the task: (returnval){ [ 876.382543] env[69992]: value = "task-2896658" [ 876.382543] env[69992]: _type = "Task" [ 876.382543] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.394105] env[69992]: DEBUG oslo_vmware.api [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Task: {'id': task-2896658, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.548972] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Task: {'id': task-2896653, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.639370] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.728340] env[69992]: INFO nova.compute.manager [-] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Took 1.70 seconds to deallocate network for instance. [ 876.750875] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896656, 'name': CreateVM_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.842523] env[69992]: INFO nova.compute.manager [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Took 17.47 seconds to build instance. [ 876.899256] env[69992]: DEBUG oslo_vmware.api [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Task: {'id': task-2896658, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.45476} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.899508] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 876.899803] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 876.900108] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 876.900395] env[69992]: INFO nova.compute.manager [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Took 1.09 seconds to destroy the instance on the hypervisor. 
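Editor's note: the records above trace the hypervisor-side destroy path for instance f249c0b9 (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task), each long-running call surfacing as a "Waiting for the task ... to complete" line followed by progress polls. The following is a minimal sketch of that call pattern using oslo.vmware, not the actual Nova vmops code; it assumes an already-configured `oslo_vmware.api.VMwareAPISession` named `session`, plus managed object references `vm_ref` and `datacenter_ref` and a `datastore_path` string, all illustrative.

```python
# Sketch only: power off, unregister, then delete the instance's datastore
# files, mirroring the task-based records in the log above.
def destroy_vm(session, vm_ref, datastore_path, datacenter_ref):
    # PowerOffVM_Task returns a task moref; wait_for_task() polls it until it
    # completes, which is what produces the "progress is N%" DEBUG lines.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # UnregisterVM is synchronous, so no task is polled for it.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Removing the backing files is again task-based.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=datastore_path,
                              datacenter=datacenter_ref)
    session.wait_for_task(task)
```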
[ 876.900777] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 876.903882] env[69992]: DEBUG nova.compute.manager [-] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 876.904207] env[69992]: DEBUG nova.network.neutron [-] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 876.930863] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd49c19-9932-4783-a0bd-b2a428b964de {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.935591] env[69992]: DEBUG nova.network.neutron [-] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 876.945943] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquiring lock "a29534bf-ee12-4b94-839b-4a12659ebd3b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.945943] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lock "a29534bf-ee12-4b94-839b-4a12659ebd3b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.951671] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf0d51e-3875-469e-acd3-b6b489a553c2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.990170] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-294a507c-bd09-4b02-8f42-8f3d005048d0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.998579] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669dcfca-72a1-4020-916c-827e5296d37a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.014242] env[69992]: DEBUG nova.compute.provider_tree [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 877.043201] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Task: {'id': task-2896653, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.751318} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.043801] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 6c58c05e-9679-4e53-89e7-c7c9cb11cff0/6c58c05e-9679-4e53-89e7-c7c9cb11cff0.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 877.043984] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 877.044248] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-25494068-925c-4705-b4f6-58b718fc6399 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.053431] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Waiting for the task: (returnval){ [ 877.053431] env[69992]: value = "task-2896659" [ 877.053431] env[69992]: _type = "Task" [ 877.053431] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.057340] env[69992]: DEBUG nova.network.neutron [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Successfully updated port: 3ab410e0-7643-4dc4-b15e-ca8b2701aefa {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 877.066165] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Task: {'id': task-2896659, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.242081] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.246608] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquiring lock "bf75484e-4020-48f7-9419-bd88d0462b90" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.246608] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "bf75484e-4020-48f7-9419-bd88d0462b90" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.254672] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896656, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.268670] env[69992]: DEBUG nova.compute.manager [req-8ec0de5c-6636-4ba8-a393-2fd4023fc0a3 req-885692fb-b796-4353-a7bb-7e02037d988b service nova] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Received event network-changed-bb164768-c900-42bd-819e-eb523bfc2d54 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 877.268867] env[69992]: DEBUG nova.compute.manager [req-8ec0de5c-6636-4ba8-a393-2fd4023fc0a3 req-885692fb-b796-4353-a7bb-7e02037d988b service nova] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Refreshing instance network info cache due to event network-changed-bb164768-c900-42bd-819e-eb523bfc2d54. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 877.269172] env[69992]: DEBUG oslo_concurrency.lockutils [req-8ec0de5c-6636-4ba8-a393-2fd4023fc0a3 req-885692fb-b796-4353-a7bb-7e02037d988b service nova] Acquiring lock "refresh_cache-64ab568c-a2ef-4bac-8885-3dde76f9f764" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.269215] env[69992]: DEBUG oslo_concurrency.lockutils [req-8ec0de5c-6636-4ba8-a393-2fd4023fc0a3 req-885692fb-b796-4353-a7bb-7e02037d988b service nova] Acquired lock "refresh_cache-64ab568c-a2ef-4bac-8885-3dde76f9f764" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 877.269491] env[69992]: DEBUG nova.network.neutron [req-8ec0de5c-6636-4ba8-a393-2fd4023fc0a3 req-885692fb-b796-4353-a7bb-7e02037d988b service nova] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Refreshing network info cache for port bb164768-c900-42bd-819e-eb523bfc2d54 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 877.290345] env[69992]: DEBUG nova.compute.manager [req-fa047819-64f7-4a95-a880-c1684613cc90 req-4b808897-562a-4abd-b118-b2dfd44a9234 service nova] [instance: eba81db1-973c-4981-baca-cb98e4087510] Received event network-vif-plugged-54980674-0d82-4eac-8cb8-3d49bf81e6f0 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 877.290345] env[69992]: DEBUG oslo_concurrency.lockutils [req-fa047819-64f7-4a95-a880-c1684613cc90 req-4b808897-562a-4abd-b118-b2dfd44a9234 service nova] Acquiring lock "eba81db1-973c-4981-baca-cb98e4087510-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.290345] env[69992]: DEBUG oslo_concurrency.lockutils [req-fa047819-64f7-4a95-a880-c1684613cc90 req-4b808897-562a-4abd-b118-b2dfd44a9234 service nova] Lock "eba81db1-973c-4981-baca-cb98e4087510-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.290592] env[69992]: DEBUG oslo_concurrency.lockutils [req-fa047819-64f7-4a95-a880-c1684613cc90 req-4b808897-562a-4abd-b118-b2dfd44a9234 service nova] Lock "eba81db1-973c-4981-baca-cb98e4087510-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.290660] env[69992]: DEBUG nova.compute.manager [req-fa047819-64f7-4a95-a880-c1684613cc90 req-4b808897-562a-4abd-b118-b2dfd44a9234 service nova] [instance: eba81db1-973c-4981-baca-cb98e4087510] No waiting events found dispatching network-vif-plugged-54980674-0d82-4eac-8cb8-3d49bf81e6f0 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 877.290937] env[69992]: WARNING nova.compute.manager [req-fa047819-64f7-4a95-a880-c1684613cc90 req-4b808897-562a-4abd-b118-b2dfd44a9234 service nova] [instance: eba81db1-973c-4981-baca-cb98e4087510] Received unexpected event network-vif-plugged-54980674-0d82-4eac-8cb8-3d49bf81e6f0 for instance with vm_state building and task_state spawning. 
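Editor's note: the network-changed and network-vif-plugged events above are handled under per-instance `refresh_cache-<uuid>` locks, and the resource tracker lines use a coarser `compute_resources` lock; both produce the "Acquiring lock" / "acquired" / "released" records via oslo.concurrency. A minimal sketch of that locking pattern follows; the function names and arguments are illustrative, not the actual Nova helpers.

```python
# Sketch of the oslo.concurrency usage behind the lock records in this log.
from oslo_concurrency import lockutils

def refresh_network_cache(instance_uuid, port_id):
    # The context manager emits the acquire/release DEBUG lines seen above.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        # ... rebuild the instance_info_cache entry for the changed port ...
        pass

# The decorator form is used for coarser locks such as "compute_resources";
# a synchronized function simply waits until the named lock is free.
@lockutils.synchronized('compute_resources')
def update_usage(instance_uuid):
    # ... adjust resource tracker accounting for the instance ...
    pass
```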
[ 877.291027] env[69992]: DEBUG nova.compute.manager [req-fa047819-64f7-4a95-a880-c1684613cc90 req-4b808897-562a-4abd-b118-b2dfd44a9234 service nova] [instance: eba81db1-973c-4981-baca-cb98e4087510] Received event network-changed-54980674-0d82-4eac-8cb8-3d49bf81e6f0 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 877.291224] env[69992]: DEBUG nova.compute.manager [req-fa047819-64f7-4a95-a880-c1684613cc90 req-4b808897-562a-4abd-b118-b2dfd44a9234 service nova] [instance: eba81db1-973c-4981-baca-cb98e4087510] Refreshing instance network info cache due to event network-changed-54980674-0d82-4eac-8cb8-3d49bf81e6f0. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 877.291412] env[69992]: DEBUG oslo_concurrency.lockutils [req-fa047819-64f7-4a95-a880-c1684613cc90 req-4b808897-562a-4abd-b118-b2dfd44a9234 service nova] Acquiring lock "refresh_cache-eba81db1-973c-4981-baca-cb98e4087510" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.291538] env[69992]: DEBUG oslo_concurrency.lockutils [req-fa047819-64f7-4a95-a880-c1684613cc90 req-4b808897-562a-4abd-b118-b2dfd44a9234 service nova] Acquired lock "refresh_cache-eba81db1-973c-4981-baca-cb98e4087510" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 877.291687] env[69992]: DEBUG nova.network.neutron [req-fa047819-64f7-4a95-a880-c1684613cc90 req-4b808897-562a-4abd-b118-b2dfd44a9234 service nova] [instance: eba81db1-973c-4981-baca-cb98e4087510] Refreshing network info cache for port 54980674-0d82-4eac-8cb8-3d49bf81e6f0 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 877.345020] env[69992]: DEBUG oslo_concurrency.lockutils [None req-510bf1f4-7df0-47b0-a34c-49493fad7175 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Lock "1d5722e1-5a48-4212-bbc7-527a3739db6e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.443413] env[69992]: DEBUG nova.network.neutron [-] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.518541] env[69992]: DEBUG nova.scheduler.client.report [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 877.544680] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Acquiring lock "068507bb-ee7a-44f7-b315-7d4b2b70e735" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.544934] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Lock "068507bb-ee7a-44f7-b315-7d4b2b70e735" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.545154] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Acquiring lock "068507bb-ee7a-44f7-b315-7d4b2b70e735-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.545337] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Lock "068507bb-ee7a-44f7-b315-7d4b2b70e735-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.545505] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Lock "068507bb-ee7a-44f7-b315-7d4b2b70e735-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.548164] env[69992]: INFO nova.compute.manager [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Terminating instance [ 877.562018] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Acquiring lock "refresh_cache-c1d73002-6e69-41a6-95b3-34dccaf872ef" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.562018] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Acquired lock "refresh_cache-c1d73002-6e69-41a6-95b3-34dccaf872ef" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 877.562018] env[69992]: DEBUG nova.network.neutron [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 877.567968] env[69992]: DEBUG oslo_vmware.api [None 
req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Task: {'id': task-2896659, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074023} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.568511] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 877.572460] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0a814f-4e7c-4462-991d-9ea878582d1f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.596664] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] 6c58c05e-9679-4e53-89e7-c7c9cb11cff0/6c58c05e-9679-4e53-89e7-c7c9cb11cff0.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 877.596974] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd72e17a-fef9-4c60-a356-175827244fb9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.620857] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Waiting for the task: (returnval){ [ 877.620857] env[69992]: value = "task-2896660" [ 877.620857] env[69992]: _type = "Task" [ 877.620857] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.630479] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Task: {'id': task-2896660, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.754506] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896656, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.853051] env[69992]: DEBUG nova.compute.manager [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 877.947318] env[69992]: INFO nova.compute.manager [-] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Took 1.04 seconds to deallocate network for instance. 
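Editor's note: for instance 6c58c05e the spawn path runs CopyVirtualDisk_Task (image cache to instance folder), ExtendVirtualDisk_Task (grow to the flavor's root size), ReconfigVM_Task (attach the disk), Rename_Task, and finally PowerOnVM_Task, in that order. The sketch below condenses the disk-preparation steps under the same assumptions as earlier: a configured `session`, a datacenter ref `dc_ref`, and illustrative VMDK paths; it is not the Nova implementation.

```python
# Sketch of the disk preparation order the spawn records above follow.
def prepare_root_disk(session, dc_ref, image_vmdk, instance_vmdk, new_size_kb):
    disk_mgr = session.vim.service_content.virtualDiskManager

    # 1. CopyVirtualDisk_Task: cached image copy -> per-instance copy.
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=image_vmdk, sourceDatacenter=dc_ref,
                              destName=instance_vmdk, destDatacenter=dc_ref)
    session.wait_for_task(task)

    # 2. ExtendVirtualDisk_Task: grow the copy to the flavor root size (KB).
    task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                              name=instance_vmdk, datacenter=dc_ref,
                              newCapacityKb=new_size_kb, eagerZero=False)
    session.wait_for_task(task)

    # 3./4. ReconfigVM_Task (attach the disk to the VM) and PowerOnVM_Task
    #        follow the same invoke_api()/wait_for_task() pattern on the
    #        instance's vm_ref, as the Reconfig/Rename/PowerOn records show.
```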
[ 878.025425] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.503s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.025947] env[69992]: DEBUG nova.compute.manager [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 878.028754] env[69992]: DEBUG oslo_concurrency.lockutils [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.991s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.031083] env[69992]: INFO nova.compute.claims [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 878.052340] env[69992]: DEBUG nova.compute.manager [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 878.052556] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 878.053453] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efafdb5d-0424-4ce2-9a05-f49be1dd540c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.063089] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 878.063089] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fafba981-92d5-4ddb-acd8-d446cd3eb851 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.074895] env[69992]: DEBUG oslo_vmware.api [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Waiting for the task: (returnval){ [ 878.074895] env[69992]: value = "task-2896661" [ 878.074895] env[69992]: _type = "Task" [ 878.074895] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.086594] env[69992]: DEBUG oslo_vmware.api [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Task: {'id': task-2896661, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.134402] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Task: {'id': task-2896660, 'name': ReconfigVM_Task, 'duration_secs': 0.30767} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.134755] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Reconfigured VM instance instance-0000000c to attach disk [datastore2] 6c58c05e-9679-4e53-89e7-c7c9cb11cff0/6c58c05e-9679-4e53-89e7-c7c9cb11cff0.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 878.135686] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7f478ca1-1ba1-466d-b211-d09dfb638ae7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.144457] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Waiting for the task: (returnval){ [ 878.144457] env[69992]: value = "task-2896662" [ 878.144457] env[69992]: _type = "Task" [ 878.144457] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.156244] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Task: {'id': task-2896662, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.182174] env[69992]: DEBUG nova.network.neutron [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 878.249220] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896656, 'name': CreateVM_Task, 'duration_secs': 1.759931} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.249468] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eba81db1-973c-4981-baca-cb98e4087510] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 878.250384] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.250583] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 878.250966] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 878.251281] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a866f43-637d-4a09-8ad5-54ed07e395ba {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.258198] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Waiting for the task: (returnval){ [ 878.258198] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52426929-2260-9550-6605-a2704928d8bd" [ 878.258198] env[69992]: _type = "Task" [ 878.258198] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.269846] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52426929-2260-9550-6605-a2704928d8bd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.380259] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.457237] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.516789] env[69992]: DEBUG nova.network.neutron [req-fa047819-64f7-4a95-a880-c1684613cc90 req-4b808897-562a-4abd-b118-b2dfd44a9234 service nova] [instance: eba81db1-973c-4981-baca-cb98e4087510] Updated VIF entry in instance network info cache for port 54980674-0d82-4eac-8cb8-3d49bf81e6f0. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 878.517196] env[69992]: DEBUG nova.network.neutron [req-fa047819-64f7-4a95-a880-c1684613cc90 req-4b808897-562a-4abd-b118-b2dfd44a9234 service nova] [instance: eba81db1-973c-4981-baca-cb98e4087510] Updating instance_info_cache with network_info: [{"id": "54980674-0d82-4eac-8cb8-3d49bf81e6f0", "address": "fa:16:3e:d4:0a:13", "network": {"id": "654372b0-0166-4b14-b821-c42c0ad0568b", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1742099554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96a05199dc1445dcb7b42b7feb26ed2d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54980674-0d", "ovs_interfaceid": "54980674-0d82-4eac-8cb8-3d49bf81e6f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.522640] env[69992]: DEBUG nova.network.neutron [req-8ec0de5c-6636-4ba8-a393-2fd4023fc0a3 req-885692fb-b796-4353-a7bb-7e02037d988b service nova] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Updated VIF entry in instance network info cache for port bb164768-c900-42bd-819e-eb523bfc2d54. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 878.522982] env[69992]: DEBUG nova.network.neutron [req-8ec0de5c-6636-4ba8-a393-2fd4023fc0a3 req-885692fb-b796-4353-a7bb-7e02037d988b service nova] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Updating instance_info_cache with network_info: [{"id": "bb164768-c900-42bd-819e-eb523bfc2d54", "address": "fa:16:3e:8d:66:f4", "network": {"id": "023d028d-4d61-4461-91bb-ebf0da99219b", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-413392921-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23727c76d64d4449820b8f861230275e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62d6a386-ffdb-4232-83f3-cb21c5e59e85", "external-id": "nsx-vlan-transportzone-950", "segmentation_id": 950, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb164768-c9", "ovs_interfaceid": "bb164768-c900-42bd-819e-eb523bfc2d54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.538123] env[69992]: DEBUG nova.compute.utils [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 878.540659] env[69992]: DEBUG nova.compute.manager [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 878.540659] env[69992]: DEBUG nova.network.neutron [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 878.589016] env[69992]: DEBUG oslo_vmware.api [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Task: {'id': task-2896661, 'name': PowerOffVM_Task, 'duration_secs': 0.409369} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.589287] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 878.589511] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 878.590167] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-563dcb9c-208e-45c7-93b8-71bbe65b1b2b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.658202] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Task: {'id': task-2896662, 'name': Rename_Task, 'duration_secs': 0.249088} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.658617] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 878.658896] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a8779637-7c90-4557-b6bf-12bd39d03719 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.671777] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Waiting for the task: (returnval){ [ 878.671777] env[69992]: value = "task-2896664" [ 878.671777] env[69992]: _type = "Task" [ 878.671777] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.693335] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 878.693625] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 878.693878] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Deleting the datastore file [datastore1] 068507bb-ee7a-44f7-b315-7d4b2b70e735 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 878.694274] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d027856-965e-4e1c-b3f2-586689dcfe45 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.704907] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Task: {'id': task-2896664, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.713484] env[69992]: DEBUG oslo_vmware.api [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Waiting for the task: (returnval){ [ 878.713484] env[69992]: value = "task-2896665" [ 878.713484] env[69992]: _type = "Task" [ 878.713484] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.722553] env[69992]: DEBUG oslo_vmware.api [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Task: {'id': task-2896665, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.775699] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52426929-2260-9550-6605-a2704928d8bd, 'name': SearchDatastore_Task, 'duration_secs': 0.019919} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.776267] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 878.776567] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 878.776876] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.777215] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 878.777469] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 878.777847] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2fdb5dff-c6ea-44c0-947d-e99b124c64aa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.799890] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 878.800190] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 878.803651] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-512e8066-d5f1-41ca-85d0-2d134f6012d2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.810991] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Waiting for the task: (returnval){ [ 878.810991] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52490b83-b599-0499-49c9-d42aef7cf7a7" [ 878.810991] env[69992]: _type = "Task" [ 878.810991] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.812736] env[69992]: DEBUG nova.policy [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '312fc29022994913825ecfd425fcee2d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea0f9171bc5c4034b8dbe9100bd6e007', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 878.826798] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52490b83-b599-0499-49c9-d42aef7cf7a7, 'name': SearchDatastore_Task} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.826798] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19799c48-3eca-4d85-ac37-305cb546e52c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.834440] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Waiting for the task: (returnval){ [ 878.834440] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]529f9042-fa59-7101-a161-fa290da6b26f" [ 878.834440] env[69992]: _type = "Task" [ 878.834440] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.845310] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]529f9042-fa59-7101-a161-fa290da6b26f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.883032] env[69992]: DEBUG nova.network.neutron [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Updating instance_info_cache with network_info: [{"id": "3ab410e0-7643-4dc4-b15e-ca8b2701aefa", "address": "fa:16:3e:37:be:b3", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.70", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ab410e0-76", "ovs_interfaceid": "3ab410e0-7643-4dc4-b15e-ca8b2701aefa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.021978] env[69992]: DEBUG oslo_concurrency.lockutils [req-fa047819-64f7-4a95-a880-c1684613cc90 req-4b808897-562a-4abd-b118-b2dfd44a9234 service nova] Releasing lock "refresh_cache-eba81db1-973c-4981-baca-cb98e4087510" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 879.021978] env[69992]: DEBUG nova.compute.manager [req-fa047819-64f7-4a95-a880-c1684613cc90 req-4b808897-562a-4abd-b118-b2dfd44a9234 service nova] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Received event network-vif-deleted-fc767b62-dfd6-429e-84f0-140bda053ff7 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 879.026672] env[69992]: DEBUG oslo_concurrency.lockutils [req-8ec0de5c-6636-4ba8-a393-2fd4023fc0a3 req-885692fb-b796-4353-a7bb-7e02037d988b service nova] Releasing lock "refresh_cache-64ab568c-a2ef-4bac-8885-3dde76f9f764" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 879.044389] env[69992]: DEBUG nova.compute.manager [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 879.191672] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Task: {'id': task-2896664, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.232069] env[69992]: DEBUG oslo_vmware.api [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Task: {'id': task-2896665, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.219272} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.232880] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 879.232880] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 879.233150] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 879.233375] env[69992]: INFO nova.compute.manager [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Took 1.18 seconds to destroy the instance on the hypervisor. [ 879.235198] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 879.235198] env[69992]: DEBUG nova.compute.manager [-] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 879.235198] env[69992]: DEBUG nova.network.neutron [-] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 879.263918] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Acquiring lock "1f9d0558-63fb-4a6f-a2d2-dd7a334249a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.263918] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Lock "1f9d0558-63fb-4a6f-a2d2-dd7a334249a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.353247] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]529f9042-fa59-7101-a161-fa290da6b26f, 'name': SearchDatastore_Task, 'duration_secs': 0.012611} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.353247] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 879.353438] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] eba81db1-973c-4981-baca-cb98e4087510/eba81db1-973c-4981-baca-cb98e4087510.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 879.353682] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-71ef96be-0a58-4491-99ad-22edbb748737 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.364877] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Waiting for the task: (returnval){ [ 879.364877] env[69992]: value = "task-2896666" [ 879.364877] env[69992]: _type = "Task" [ 879.364877] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.378852] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Task: {'id': task-2896666, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.385611] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Releasing lock "refresh_cache-c1d73002-6e69-41a6-95b3-34dccaf872ef" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 879.385945] env[69992]: DEBUG nova.compute.manager [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Instance network_info: |[{"id": "3ab410e0-7643-4dc4-b15e-ca8b2701aefa", "address": "fa:16:3e:37:be:b3", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.70", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ab410e0-76", "ovs_interfaceid": "3ab410e0-7643-4dc4-b15e-ca8b2701aefa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 879.387756] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:be:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '07e9bef1-2b0e-4e4d-997f-de71bb0e213a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3ab410e0-7643-4dc4-b15e-ca8b2701aefa', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 879.394625] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Creating folder: Project (44ba33c0faa743dbba7666a8b8eec8df). Parent ref: group-v581821. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 879.394925] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-881ae247-c83e-4b31-98a5-98ecabae3c52 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.411122] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Created folder: Project (44ba33c0faa743dbba7666a8b8eec8df) in parent group-v581821. [ 879.411122] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Creating folder: Instances. Parent ref: group-v581861. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 879.411122] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d637ee3-2d43-4e5b-ada9-c2b227dfa831 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.423351] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Created folder: Instances in parent group-v581861. [ 879.425948] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 879.425948] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 879.425948] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d88dfbfe-f632-4747-a84c-60a7a8ffef54 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.458831] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 879.458831] env[69992]: value = "task-2896669" [ 879.458831] env[69992]: _type = "Task" [ 879.458831] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.469621] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896669, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.542778] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c84993ed-fc60-4abd-9b9b-81b16dcc8514 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.563029] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-831917d3-c2e1-47c1-acbc-dae779493521 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.608201] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4e8761-3c83-48d5-b1ad-3ab1057a15fb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.618608] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0f5f85-5658-46e8-b36f-0177cd7e1455 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.639066] env[69992]: DEBUG nova.compute.provider_tree [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 879.644638] env[69992]: DEBUG nova.network.neutron [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Successfully created port: 8b0b0a96-e1ab-4c92-b8d0-af130d30c696 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 879.688303] env[69992]: DEBUG oslo_vmware.api [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Task: {'id': task-2896664, 'name': PowerOnVM_Task, 'duration_secs': 0.538958} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.688657] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 879.688927] env[69992]: INFO nova.compute.manager [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Took 9.11 seconds to spawn the instance on the hypervisor. 
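The entries above and below this point all follow the same asynchronous pattern: nova invokes a vSphere task method (PowerOnVM_Task, CopyVirtualDisk_Task, CreateVM_Task, DeleteDatastoreFile_Task, ...) through oslo.vmware and then blocks in wait_for_task, whose polling loop emits the recurring "progress is N%" and "completed successfully" records seen in this log. A minimal standalone sketch of that pattern in Python, with placeholder credentials and a hypothetical VM moref (neither appears in the log), might look like:

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Placeholder connection details -- only the vCenter host name is taken from the log.
session = vmware_api.VMwareAPISession(
    host='vc1.osci.c.eu-de-1.cloud.sap',
    server_username='administrator@vsphere.local',  # assumed, not in the log
    server_password='***',                          # assumed, not in the log
    api_retry_count=10,
    task_poll_interval=0.5)

# Hypothetical managed object reference; real morefs are not recorded in this log.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Start the asynchronous vSphere task and block until it finishes.
# wait_for_task() repeatedly polls the task object, which is what produces
# the "progress is N%" / "completed successfully" entries recorded here.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)  # 'success' once the task has completed cleanly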
[ 879.689163] env[69992]: DEBUG nova.compute.manager [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 879.690044] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd56e2b-228d-4c72-8452-6bca9c261dce {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.882592] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Task: {'id': task-2896666, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.982064] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896669, 'name': CreateVM_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.058517] env[69992]: DEBUG nova.compute.manager [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 880.101788] env[69992]: DEBUG nova.compute.manager [req-22575d57-e65c-433b-8270-776a7ccd852c req-35a232e1-c2b4-4587-99f2-42b3cd49eb8b service nova] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Received event network-vif-plugged-3ab410e0-7643-4dc4-b15e-ca8b2701aefa {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 880.104632] env[69992]: DEBUG oslo_concurrency.lockutils [req-22575d57-e65c-433b-8270-776a7ccd852c req-35a232e1-c2b4-4587-99f2-42b3cd49eb8b service nova] Acquiring lock "c1d73002-6e69-41a6-95b3-34dccaf872ef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.104632] env[69992]: DEBUG oslo_concurrency.lockutils [req-22575d57-e65c-433b-8270-776a7ccd852c req-35a232e1-c2b4-4587-99f2-42b3cd49eb8b service nova] Lock "c1d73002-6e69-41a6-95b3-34dccaf872ef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.104632] env[69992]: DEBUG oslo_concurrency.lockutils [req-22575d57-e65c-433b-8270-776a7ccd852c req-35a232e1-c2b4-4587-99f2-42b3cd49eb8b service nova] Lock "c1d73002-6e69-41a6-95b3-34dccaf872ef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.104632] env[69992]: DEBUG nova.compute.manager [req-22575d57-e65c-433b-8270-776a7ccd852c req-35a232e1-c2b4-4587-99f2-42b3cd49eb8b service nova] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] No waiting events found dispatching network-vif-plugged-3ab410e0-7643-4dc4-b15e-ca8b2701aefa {{(pid=69992) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 880.104632] env[69992]: WARNING nova.compute.manager [req-22575d57-e65c-433b-8270-776a7ccd852c req-35a232e1-c2b4-4587-99f2-42b3cd49eb8b service nova] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Received unexpected event network-vif-plugged-3ab410e0-7643-4dc4-b15e-ca8b2701aefa for instance with vm_state building and task_state spawning. [ 880.105134] env[69992]: DEBUG nova.compute.manager [req-22575d57-e65c-433b-8270-776a7ccd852c req-35a232e1-c2b4-4587-99f2-42b3cd49eb8b service nova] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Received event network-changed-3ab410e0-7643-4dc4-b15e-ca8b2701aefa {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 880.105134] env[69992]: DEBUG nova.compute.manager [req-22575d57-e65c-433b-8270-776a7ccd852c req-35a232e1-c2b4-4587-99f2-42b3cd49eb8b service nova] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Refreshing instance network info cache due to event network-changed-3ab410e0-7643-4dc4-b15e-ca8b2701aefa. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 880.105134] env[69992]: DEBUG oslo_concurrency.lockutils [req-22575d57-e65c-433b-8270-776a7ccd852c req-35a232e1-c2b4-4587-99f2-42b3cd49eb8b service nova] Acquiring lock "refresh_cache-c1d73002-6e69-41a6-95b3-34dccaf872ef" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.105134] env[69992]: DEBUG oslo_concurrency.lockutils [req-22575d57-e65c-433b-8270-776a7ccd852c req-35a232e1-c2b4-4587-99f2-42b3cd49eb8b service nova] Acquired lock "refresh_cache-c1d73002-6e69-41a6-95b3-34dccaf872ef" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 880.105134] env[69992]: DEBUG nova.network.neutron [req-22575d57-e65c-433b-8270-776a7ccd852c req-35a232e1-c2b4-4587-99f2-42b3cd49eb8b service nova] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Refreshing network info cache for port 3ab410e0-7643-4dc4-b15e-ca8b2701aefa {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 880.113054] env[69992]: DEBUG nova.virt.hardware [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:45:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='113561657',id=23,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1482757508',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 880.113054] env[69992]: DEBUG nova.virt.hardware [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Flavor limits 0:0:0 {{(pid=69992) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 880.113054] env[69992]: DEBUG nova.virt.hardware [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 880.113491] env[69992]: DEBUG nova.virt.hardware [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 880.113491] env[69992]: DEBUG nova.virt.hardware [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 880.113491] env[69992]: DEBUG nova.virt.hardware [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 880.113491] env[69992]: DEBUG nova.virt.hardware [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 880.113491] env[69992]: DEBUG nova.virt.hardware [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 880.113774] env[69992]: DEBUG nova.virt.hardware [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 880.113774] env[69992]: DEBUG nova.virt.hardware [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 880.113774] env[69992]: DEBUG nova.virt.hardware [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 880.114781] env[69992]: DEBUG nova.compute.manager [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 
1d5722e1-5a48-4212-bbc7-527a3739db6e] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 880.115632] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea4c4d85-3f47-41fc-b76b-d241b76feebc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.120819] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a39fdc68-c7e4-4867-8943-df2b0061dfcc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.135289] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6e6508-ef73-48fb-b8bb-25c83df0280c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.142773] env[69992]: DEBUG nova.scheduler.client.report [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 880.214707] env[69992]: INFO nova.compute.manager [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Took 20.15 seconds to build instance. [ 880.379983] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Task: {'id': task-2896666, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.764512} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.379983] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] eba81db1-973c-4981-baca-cb98e4087510/eba81db1-973c-4981-baca-cb98e4087510.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 880.379983] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 880.380330] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3691d4a8-2506-4b10-a1ed-def40d7d9dd2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.389110] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Waiting for the task: (returnval){ [ 880.389110] env[69992]: value = "task-2896670" [ 880.389110] env[69992]: _type = "Task" [ 880.389110] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.401923] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Task: {'id': task-2896670, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.473060] env[69992]: DEBUG nova.network.neutron [-] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.475282] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896669, 'name': CreateVM_Task, 'duration_secs': 0.837651} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.475282] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 880.475894] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.476097] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 880.476413] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 880.476724] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d50c6900-9da1-41fd-8ea8-8462ab27a789 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.482945] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Waiting for the task: (returnval){ [ 880.482945] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5218e12d-4764-24f4-fdde-ff40150aa801" [ 880.482945] env[69992]: _type = "Task" [ 880.482945] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.494688] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5218e12d-4764-24f4-fdde-ff40150aa801, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.644047] env[69992]: INFO nova.compute.manager [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] instance snapshotting [ 880.649570] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a667fd5e-03f1-4944-9af3-5e516bd77b1c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.657202] env[69992]: DEBUG oslo_concurrency.lockutils [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.627s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.657202] env[69992]: DEBUG nova.compute.manager [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 880.678881] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.531s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.680576] env[69992]: INFO nova.compute.claims [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 880.687403] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e44917f-7593-4f9b-9fd0-61c826fb9ff9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.718231] env[69992]: DEBUG oslo_concurrency.lockutils [None req-946f5f72-80a2-42b0-8b9b-fa7135064df2 tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Lock "6c58c05e-9679-4e53-89e7-c7c9cb11cff0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 21.676s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.902082] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Task: {'id': task-2896670, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080405} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.902366] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 880.903204] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1157509c-9311-4ad7-a38d-194fcfe744b7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.934483] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] eba81db1-973c-4981-baca-cb98e4087510/eba81db1-973c-4981-baca-cb98e4087510.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 880.934806] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fdddf63b-066e-41c9-bbd4-26427021ed0b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.950119] env[69992]: DEBUG nova.network.neutron [req-22575d57-e65c-433b-8270-776a7ccd852c req-35a232e1-c2b4-4587-99f2-42b3cd49eb8b service nova] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Updated VIF entry in instance network info cache for port 3ab410e0-7643-4dc4-b15e-ca8b2701aefa. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 880.950259] env[69992]: DEBUG nova.network.neutron [req-22575d57-e65c-433b-8270-776a7ccd852c req-35a232e1-c2b4-4587-99f2-42b3cd49eb8b service nova] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Updating instance_info_cache with network_info: [{"id": "3ab410e0-7643-4dc4-b15e-ca8b2701aefa", "address": "fa:16:3e:37:be:b3", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.70", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ab410e0-76", "ovs_interfaceid": "3ab410e0-7643-4dc4-b15e-ca8b2701aefa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.959624] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Waiting for the task: (returnval){ [ 880.959624] env[69992]: value = "task-2896671" [ 880.959624] env[69992]: _type = "Task" [ 880.959624] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.973902] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Task: {'id': task-2896671, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.979529] env[69992]: INFO nova.compute.manager [-] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Took 1.75 seconds to deallocate network for instance. [ 881.001116] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5218e12d-4764-24f4-fdde-ff40150aa801, 'name': SearchDatastore_Task, 'duration_secs': 0.011954} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.001634] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 881.001977] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 881.002394] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.002662] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 881.002973] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 881.003378] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fab0d00b-5c2e-4f6e-89a8-a1932e81af59 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.013374] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 881.013597] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 881.014429] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f70f54a-9046-4678-a0b2-4d98a23eef86 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.021053] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Waiting for the task: (returnval){ [ 881.021053] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52e65ca7-83bd-12d6-efb4-d91766783125" [ 881.021053] env[69992]: _type = "Task" [ 881.021053] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.030573] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e65ca7-83bd-12d6-efb4-d91766783125, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.186575] env[69992]: DEBUG nova.compute.utils [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 881.195274] env[69992]: DEBUG nova.compute.manager [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 881.196299] env[69992]: DEBUG nova.network.neutron [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 881.197955] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Acquiring lock "64ab568c-a2ef-4bac-8885-3dde76f9f764" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.197955] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Lock "64ab568c-a2ef-4bac-8885-3dde76f9f764" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.197955] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Acquiring lock "64ab568c-a2ef-4bac-8885-3dde76f9f764-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.198194] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Lock "64ab568c-a2ef-4bac-8885-3dde76f9f764-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.198286] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Lock "64ab568c-a2ef-4bac-8885-3dde76f9f764-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.200806] env[69992]: INFO nova.compute.manager [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Terminating instance [ 881.205528] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Creating Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 881.206339] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-87409c69-ed1e-43ed-b684-4210066bfa05 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.216765] env[69992]: DEBUG oslo_vmware.api [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Waiting for the task: (returnval){ [ 881.216765] env[69992]: value = "task-2896672" [ 881.216765] env[69992]: _type = "Task" [ 881.216765] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.221203] env[69992]: DEBUG nova.compute.manager [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 881.232622] env[69992]: DEBUG oslo_vmware.api [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896672, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.285143] env[69992]: DEBUG nova.policy [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '174f0db471f84f40a2e18bf813e2480e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3fc8205ec2e14fdba28998521b552a69', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 881.453675] env[69992]: DEBUG oslo_concurrency.lockutils [req-22575d57-e65c-433b-8270-776a7ccd852c req-35a232e1-c2b4-4587-99f2-42b3cd49eb8b service nova] Releasing lock "refresh_cache-c1d73002-6e69-41a6-95b3-34dccaf872ef" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 881.472262] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Task: {'id': task-2896671, 'name': ReconfigVM_Task, 'duration_secs': 0.362} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.472621] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Reconfigured VM instance instance-0000000d to attach disk [datastore2] eba81db1-973c-4981-baca-cb98e4087510/eba81db1-973c-4981-baca-cb98e4087510.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 881.473214] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-940cc59b-9028-462b-aa81-9ea681ca5be2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.482218] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Waiting for the task: (returnval){ [ 881.482218] env[69992]: value = "task-2896673" [ 881.482218] env[69992]: _type = "Task" [ 881.482218] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.494161] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.494463] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Task: {'id': task-2896673, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.538614] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e65ca7-83bd-12d6-efb4-d91766783125, 'name': SearchDatastore_Task, 'duration_secs': 0.010996} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.538614] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a199feb-96cf-4ccf-aeef-016656493cbc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.543120] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Waiting for the task: (returnval){ [ 881.543120] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d1551b-f9f8-5788-1044-7cc73e938085" [ 881.543120] env[69992]: _type = "Task" [ 881.543120] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.554662] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d1551b-f9f8-5788-1044-7cc73e938085, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.698943] env[69992]: DEBUG nova.compute.manager [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 881.710541] env[69992]: DEBUG nova.compute.manager [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 881.710858] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 881.711712] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffeed0c3-18b5-4550-be98-45ea0ddf8479 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.735401] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 881.735401] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a8ba752-3548-4217-ba9d-75b8efef89a1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.743414] env[69992]: DEBUG oslo_vmware.api [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896672, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.747180] env[69992]: DEBUG oslo_vmware.api [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Waiting for the task: (returnval){ [ 881.747180] env[69992]: value = "task-2896674" [ 881.747180] env[69992]: _type = "Task" [ 881.747180] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.751384] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.759167] env[69992]: DEBUG oslo_vmware.api [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896674, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.843189] env[69992]: DEBUG nova.network.neutron [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Successfully updated port: 8b0b0a96-e1ab-4c92-b8d0-af130d30c696 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 881.974827] env[69992]: DEBUG nova.network.neutron [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Successfully created port: b35f3c6b-88f2-436b-994c-ba4ce4ad2662 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 881.997506] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Task: {'id': task-2896673, 'name': Rename_Task, 'duration_secs': 0.337035} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.998076] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 881.998437] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cd0d1071-cb4c-4d52-aad0-97e6b23efa6a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.008706] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Waiting for the task: (returnval){ [ 882.008706] env[69992]: value = "task-2896675" [ 882.008706] env[69992]: _type = "Task" [ 882.008706] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.031434] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Task: {'id': task-2896675, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.060903] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d1551b-f9f8-5788-1044-7cc73e938085, 'name': SearchDatastore_Task, 'duration_secs': 0.037557} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.066978] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 882.067275] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] c1d73002-6e69-41a6-95b3-34dccaf872ef/c1d73002-6e69-41a6-95b3-34dccaf872ef.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 882.067766] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9ff918a-122c-4f02-a86b-410d488caf5c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.078183] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Waiting for the task: (returnval){ [ 882.078183] env[69992]: value = "task-2896676" [ 882.078183] env[69992]: _type = "Task" [ 882.078183] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.096241] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896676, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.214426] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb3781ea-e74f-4764-a9f5-6a588a41a394 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.228837] env[69992]: DEBUG oslo_vmware.api [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896672, 'name': CreateSnapshot_Task, 'duration_secs': 0.981891} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.230768] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Created Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 882.231687] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47cc3299-94e2-46fb-acfc-9da71172fe85 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.235381] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ea80a5-a924-489d-8d83-b2bd64d86620 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.287951] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b32afb-3e33-46c5-9bc1-35e57312a644 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.302188] env[69992]: DEBUG oslo_vmware.api [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896674, 'name': PowerOffVM_Task, 'duration_secs': 0.259503} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.302188] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 882.302532] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 882.304441] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c09e5835-2b47-49ab-991c-06df3ca9e252 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.311422] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-55ead0af-6e5a-4b04-88f5-37eeedf86cb3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.331774] env[69992]: DEBUG nova.compute.provider_tree [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.352932] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 
tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquiring lock "refresh_cache-a9274dfc-afbd-419b-a98b-053d71a05d7c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.353178] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquired lock "refresh_cache-a9274dfc-afbd-419b-a98b-053d71a05d7c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 882.354631] env[69992]: DEBUG nova.network.neutron [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 882.407875] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 882.408773] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 882.408773] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Deleting the datastore file [datastore2] 64ab568c-a2ef-4bac-8885-3dde76f9f764 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 882.409064] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a7050b9-5765-485c-836b-2ef876a28797 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.420689] env[69992]: DEBUG oslo_vmware.api [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Waiting for the task: (returnval){ [ 882.420689] env[69992]: value = "task-2896678" [ 882.420689] env[69992]: _type = "Task" [ 882.420689] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.432353] env[69992]: DEBUG oslo_vmware.api [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896678, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.525162] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Task: {'id': task-2896675, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.595606] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896676, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.659448] env[69992]: DEBUG nova.compute.manager [None req-4217f266-9adf-4b7b-8fa8-2208e9144884 tempest-ServerExternalEventsTest-1004551733 tempest-ServerExternalEventsTest-1004551733-project] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Received event network-changed {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 882.659804] env[69992]: DEBUG nova.compute.manager [None req-4217f266-9adf-4b7b-8fa8-2208e9144884 tempest-ServerExternalEventsTest-1004551733 tempest-ServerExternalEventsTest-1004551733-project] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Refreshing instance network info cache due to event network-changed. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 882.660374] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4217f266-9adf-4b7b-8fa8-2208e9144884 tempest-ServerExternalEventsTest-1004551733 tempest-ServerExternalEventsTest-1004551733-project] Acquiring lock "refresh_cache-6c58c05e-9679-4e53-89e7-c7c9cb11cff0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.660903] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4217f266-9adf-4b7b-8fa8-2208e9144884 tempest-ServerExternalEventsTest-1004551733 tempest-ServerExternalEventsTest-1004551733-project] Acquired lock "refresh_cache-6c58c05e-9679-4e53-89e7-c7c9cb11cff0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 882.661184] env[69992]: DEBUG nova.network.neutron [None req-4217f266-9adf-4b7b-8fa8-2208e9144884 tempest-ServerExternalEventsTest-1004551733 tempest-ServerExternalEventsTest-1004551733-project] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 882.712166] env[69992]: DEBUG nova.compute.manager [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 882.741028] env[69992]: DEBUG nova.virt.hardware [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 882.741028] env[69992]: DEBUG nova.virt.hardware [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 882.741028] env[69992]: DEBUG nova.virt.hardware [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 882.741028] env[69992]: DEBUG nova.virt.hardware [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 882.741293] env[69992]: DEBUG nova.virt.hardware [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 882.741293] env[69992]: DEBUG nova.virt.hardware [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 882.741293] env[69992]: DEBUG nova.virt.hardware [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 882.741293] env[69992]: DEBUG nova.virt.hardware [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 882.741293] env[69992]: DEBUG nova.virt.hardware [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 
tempest-ServerRescueTestJSON-1973402-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 882.741460] env[69992]: DEBUG nova.virt.hardware [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 882.741460] env[69992]: DEBUG nova.virt.hardware [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 882.741460] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad9f363-b16c-4457-a5f4-730405d36826 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.751652] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcca3101-85b6-4333-b17f-0d5dd9bd85e5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.798075] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Creating linked-clone VM from snapshot {{(pid=69992) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 882.798075] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-69d44a80-0368-4df7-94a9-6915fd1a5f34 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.808414] env[69992]: DEBUG oslo_vmware.api [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Waiting for the task: (returnval){ [ 882.808414] env[69992]: value = "task-2896679" [ 882.808414] env[69992]: _type = "Task" [ 882.808414] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.820271] env[69992]: DEBUG oslo_vmware.api [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896679, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.836411] env[69992]: DEBUG nova.scheduler.client.report [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 882.944304] env[69992]: DEBUG nova.network.neutron [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 882.950475] env[69992]: DEBUG oslo_vmware.api [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Task: {'id': task-2896678, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.500832} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.951153] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 882.951568] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 882.951943] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 882.952330] env[69992]: INFO nova.compute.manager [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Took 1.24 seconds to destroy the instance on the hypervisor. [ 882.952808] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 882.956721] env[69992]: DEBUG nova.compute.manager [-] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 882.956721] env[69992]: DEBUG nova.network.neutron [-] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 883.020346] env[69992]: DEBUG oslo_vmware.api [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Task: {'id': task-2896675, 'name': PowerOnVM_Task, 'duration_secs': 0.820992} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.024027] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 883.024027] env[69992]: INFO nova.compute.manager [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Took 9.90 seconds to spawn the instance on the hypervisor. [ 883.024027] env[69992]: DEBUG nova.compute.manager [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 883.024027] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f92bbc-3048-454d-9c23-53b04a32912f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.094487] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896676, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.578454} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.094973] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] c1d73002-6e69-41a6-95b3-34dccaf872ef/c1d73002-6e69-41a6-95b3-34dccaf872ef.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 883.095462] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 883.095846] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c683cb77-2953-4048-86f7-0f4bc3373de6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.104876] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Waiting for the task: (returnval){ [ 883.104876] env[69992]: value = "task-2896680" [ 883.104876] env[69992]: _type = "Task" [ 883.104876] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.115278] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896680, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.299560] env[69992]: DEBUG nova.compute.manager [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Received event network-changed-e64de32e-0e37-4777-91e7-8be0da0fa147 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 883.299560] env[69992]: DEBUG nova.compute.manager [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Refreshing instance network info cache due to event network-changed-e64de32e-0e37-4777-91e7-8be0da0fa147. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 883.299560] env[69992]: DEBUG oslo_concurrency.lockutils [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] Acquiring lock "refresh_cache-e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.299560] env[69992]: DEBUG oslo_concurrency.lockutils [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] Acquired lock "refresh_cache-e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.299560] env[69992]: DEBUG nova.network.neutron [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Refreshing network info cache for port e64de32e-0e37-4777-91e7-8be0da0fa147 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 883.322366] env[69992]: DEBUG oslo_vmware.api [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896679, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.342750] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.664s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.343564] env[69992]: DEBUG nova.compute.manager [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 883.351633] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.143s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.353014] env[69992]: INFO nova.compute.claims [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 883.461944] env[69992]: DEBUG nova.network.neutron [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Updating instance_info_cache with network_info: [{"id": "8b0b0a96-e1ab-4c92-b8d0-af130d30c696", "address": "fa:16:3e:d2:ca:cd", "network": {"id": "b6ad7b53-72db-475c-a28d-2b0c8da19818", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1429555593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea0f9171bc5c4034b8dbe9100bd6e007", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b0b0a96-e1", "ovs_interfaceid": "8b0b0a96-e1ab-4c92-b8d0-af130d30c696", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.557833] env[69992]: INFO nova.compute.manager [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Took 19.74 seconds to build instance. [ 883.622433] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896680, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078246} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.623633] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 883.625084] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e90d5b-c094-4ab3-93bf-6b87a6da76dc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.655635] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Reconfiguring VM instance instance-0000000e to attach disk [datastore2] c1d73002-6e69-41a6-95b3-34dccaf872ef/c1d73002-6e69-41a6-95b3-34dccaf872ef.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 883.656671] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c16ae100-2337-44a4-8bbe-2690c1d4787d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.681134] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Waiting for the task: (returnval){ [ 883.681134] env[69992]: value = "task-2896681" [ 883.681134] env[69992]: _type = "Task" [ 883.681134] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.692853] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896681, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.822855] env[69992]: DEBUG oslo_vmware.api [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896679, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.854395] env[69992]: DEBUG nova.network.neutron [None req-4217f266-9adf-4b7b-8fa8-2208e9144884 tempest-ServerExternalEventsTest-1004551733 tempest-ServerExternalEventsTest-1004551733-project] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Updating instance_info_cache with network_info: [{"id": "ac8aea81-104c-4dc6-a761-379a3d5a7b2d", "address": "fa:16:3e:81:67:75", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.168", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac8aea81-10", "ovs_interfaceid": "ac8aea81-104c-4dc6-a761-379a3d5a7b2d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.866701] env[69992]: DEBUG nova.compute.utils [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 883.868158] env[69992]: DEBUG nova.compute.manager [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 883.868467] env[69992]: DEBUG nova.network.neutron [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 883.912481] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Acquiring lock "ab3df643-58db-45b7-a572-9c040135989d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.912607] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Lock "ab3df643-58db-45b7-a572-9c040135989d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.953287] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Releasing lock "refresh_cache-a9274dfc-afbd-419b-a98b-053d71a05d7c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 883.953287] env[69992]: DEBUG nova.compute.manager [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Instance network_info: |[{"id": "8b0b0a96-e1ab-4c92-b8d0-af130d30c696", "address": "fa:16:3e:d2:ca:cd", "network": {"id": "b6ad7b53-72db-475c-a28d-2b0c8da19818", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1429555593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea0f9171bc5c4034b8dbe9100bd6e007", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b0b0a96-e1", "ovs_interfaceid": "8b0b0a96-e1ab-4c92-b8d0-af130d30c696", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 883.954096] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 
tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:ca:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4a2b284a-a29c-478f-b763-c9b5821e20ec', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8b0b0a96-e1ab-4c92-b8d0-af130d30c696', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 883.963827] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Creating folder: Project (ea0f9171bc5c4034b8dbe9100bd6e007). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 883.966028] env[69992]: DEBUG nova.policy [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dc6792edfe6245d2ba77a14aba041ca0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '658cab8ee4194f7f98dd07de450f248b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 883.967604] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a1165c27-be94-48ac-9823-ccebb4d62e43 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.986094] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Created folder: Project (ea0f9171bc5c4034b8dbe9100bd6e007) in parent group-v581821. [ 883.986374] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Creating folder: Instances. Parent ref: group-v581866. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 883.986719] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3563f485-67a4-4278-bb12-ebb0d4e4fdc8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.001051] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Created folder: Instances in parent group-v581866. [ 884.001896] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 884.001896] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 884.001896] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b89bd2e-339a-414a-a1c1-098f03044a5d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.025860] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 884.025860] env[69992]: value = "task-2896684" [ 884.025860] env[69992]: _type = "Task" [ 884.025860] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.035717] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896684, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.062984] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e997ca68-5396-4aa0-b1e0-a56ac03447e3 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Lock "eba81db1-973c-4981-baca-cb98e4087510" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.280s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.197865] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896681, 'name': ReconfigVM_Task, 'duration_secs': 0.496177} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.197865] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Reconfigured VM instance instance-0000000e to attach disk [datastore2] c1d73002-6e69-41a6-95b3-34dccaf872ef/c1d73002-6e69-41a6-95b3-34dccaf872ef.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 884.198366] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-20ea0d32-836b-4378-945c-a315d53d9a97 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.208429] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Waiting for the task: (returnval){ [ 884.208429] env[69992]: value = "task-2896685" [ 884.208429] env[69992]: _type = "Task" [ 884.208429] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.227528] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896685, 'name': Rename_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.286927] env[69992]: DEBUG nova.network.neutron [-] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.326223] env[69992]: DEBUG oslo_vmware.api [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896679, 'name': CloneVM_Task} progress is 95%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.346303] env[69992]: DEBUG oslo_concurrency.lockutils [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Acquiring lock "6c58c05e-9679-4e53-89e7-c7c9cb11cff0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.346799] env[69992]: DEBUG oslo_concurrency.lockutils [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Lock "6c58c05e-9679-4e53-89e7-c7c9cb11cff0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.346880] env[69992]: DEBUG oslo_concurrency.lockutils [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Acquiring lock "6c58c05e-9679-4e53-89e7-c7c9cb11cff0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.347074] env[69992]: DEBUG oslo_concurrency.lockutils [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Lock "6c58c05e-9679-4e53-89e7-c7c9cb11cff0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.347304] env[69992]: DEBUG oslo_concurrency.lockutils [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Lock "6c58c05e-9679-4e53-89e7-c7c9cb11cff0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.353955] env[69992]: INFO nova.compute.manager [None req-834c3049-3c95-48f5-900b-2bfac3d8784e 
tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Terminating instance [ 884.360189] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4217f266-9adf-4b7b-8fa8-2208e9144884 tempest-ServerExternalEventsTest-1004551733 tempest-ServerExternalEventsTest-1004551733-project] Releasing lock "refresh_cache-6c58c05e-9679-4e53-89e7-c7c9cb11cff0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.368648] env[69992]: DEBUG nova.compute.manager [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 884.528636] env[69992]: DEBUG nova.network.neutron [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Successfully updated port: b35f3c6b-88f2-436b-994c-ba4ce4ad2662 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 884.537019] env[69992]: DEBUG nova.network.neutron [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Updated VIF entry in instance network info cache for port e64de32e-0e37-4777-91e7-8be0da0fa147. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 884.537019] env[69992]: DEBUG nova.network.neutron [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Updating instance_info_cache with network_info: [{"id": "e64de32e-0e37-4777-91e7-8be0da0fa147", "address": "fa:16:3e:c2:9f:3c", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape64de32e-0e", "ovs_interfaceid": "e64de32e-0e37-4777-91e7-8be0da0fa147", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.546880] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896684, 'name': CreateVM_Task, 'duration_secs': 0.491051} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.548206] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 884.549955] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.549955] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.550553] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 884.551792] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1d2fdda-08d0-4e07-8081-1c434e9ab0ec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.562243] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for the task: (returnval){ [ 884.562243] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a188f4-fba5-5023-04e7-e7a2ef172b01" [ 884.562243] env[69992]: _type = "Task" [ 884.562243] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.568876] env[69992]: DEBUG nova.compute.manager [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 884.575069] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a188f4-fba5-5023-04e7-e7a2ef172b01, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.715228] env[69992]: DEBUG nova.network.neutron [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Successfully created port: 73053414-72bf-473f-8a22-4e100e8ced17 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 884.741178] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896685, 'name': Rename_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.792046] env[69992]: INFO nova.compute.manager [-] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Took 1.84 seconds to deallocate network for instance. [ 884.833453] env[69992]: DEBUG oslo_vmware.api [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896679, 'name': CloneVM_Task, 'duration_secs': 1.873736} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.833854] env[69992]: INFO nova.virt.vmwareapi.vmops [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Created linked-clone VM from snapshot [ 884.834611] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90bb815a-df3a-4926-a3c1-488af483be3d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.848782] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Uploading image 572b940e-a598-4231-8a23-19c3b913cf9c {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 884.859290] env[69992]: DEBUG nova.compute.manager [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 884.859535] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 884.860999] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09bd668f-31ac-418f-b8b2-368d5941ce04 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.870890] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 884.871177] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dea06d4b-717f-4a9d-bb11-e19f71c84392 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.875257] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35624807-bbf0-45bf-b1c5-794350149d44 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.885490] env[69992]: DEBUG oslo_vmware.api [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Waiting for the task: (returnval){ [ 884.885490] env[69992]: value = "task-2896686" [ 884.885490] env[69992]: _type = "Task" [ 884.885490] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.892436] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f0cec5-4316-4226-9d71-8a10451ba3ae {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.901452] env[69992]: DEBUG oslo_vmware.api [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Task: {'id': task-2896686, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.903598] env[69992]: DEBUG oslo_vmware.rw_handles [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 884.903598] env[69992]: value = "vm-581865" [ 884.903598] env[69992]: _type = "VirtualMachine" [ 884.903598] env[69992]: }. 
{{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 884.904681] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4ee7059d-2345-4ba7-b866-51d2d27b5c15 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.942275] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11eb43bb-5a97-40b5-87e6-43eb3f5ad1c7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.944894] env[69992]: DEBUG oslo_vmware.rw_handles [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Lease: (returnval){ [ 884.944894] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52768496-a314-2f41-fe2c-af22352a8e58" [ 884.944894] env[69992]: _type = "HttpNfcLease" [ 884.944894] env[69992]: } obtained for exporting VM: (result){ [ 884.944894] env[69992]: value = "vm-581865" [ 884.944894] env[69992]: _type = "VirtualMachine" [ 884.944894] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 884.945308] env[69992]: DEBUG oslo_vmware.api [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Waiting for the lease: (returnval){ [ 884.945308] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52768496-a314-2f41-fe2c-af22352a8e58" [ 884.945308] env[69992]: _type = "HttpNfcLease" [ 884.945308] env[69992]: } to be ready. {{(pid=69992) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 884.955874] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef28648d-c928-4eac-b0c2-c61020e8ecf3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.960760] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 884.960760] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52768496-a314-2f41-fe2c-af22352a8e58" [ 884.960760] env[69992]: _type = "HttpNfcLease" [ 884.960760] env[69992]: } is ready. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 884.961555] env[69992]: DEBUG oslo_vmware.rw_handles [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 884.961555] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52768496-a314-2f41-fe2c-af22352a8e58" [ 884.961555] env[69992]: _type = "HttpNfcLease" [ 884.961555] env[69992]: }. 
{{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 884.962358] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac9b0016-02f2-4561-9c7b-0503f45bb051 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.975920] env[69992]: DEBUG nova.compute.provider_tree [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.982455] env[69992]: DEBUG oslo_vmware.rw_handles [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5262c2ea-38fc-63c0-9377-9f4f21dc0363/disk-0.vmdk from lease info. {{(pid=69992) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 884.982638] env[69992]: DEBUG oslo_vmware.rw_handles [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5262c2ea-38fc-63c0-9377-9f4f21dc0363/disk-0.vmdk for reading. {{(pid=69992) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 885.054493] env[69992]: DEBUG oslo_concurrency.lockutils [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquiring lock "refresh_cache-27580836-7ab5-4e64-a985-3e6fc22a8b77" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.054786] env[69992]: DEBUG oslo_concurrency.lockutils [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquired lock "refresh_cache-27580836-7ab5-4e64-a985-3e6fc22a8b77" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.054843] env[69992]: DEBUG nova.network.neutron [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 885.059328] env[69992]: DEBUG oslo_concurrency.lockutils [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] Releasing lock "refresh_cache-e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.059623] env[69992]: DEBUG nova.compute.manager [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Received event network-vif-deleted-0f5065fc-9e45-41f9-a922-76f438876fea {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 885.059861] env[69992]: DEBUG nova.compute.manager [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 
req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Received event network-vif-plugged-8b0b0a96-e1ab-4c92-b8d0-af130d30c696 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 885.060103] env[69992]: DEBUG oslo_concurrency.lockutils [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] Acquiring lock "a9274dfc-afbd-419b-a98b-053d71a05d7c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 885.060342] env[69992]: DEBUG oslo_concurrency.lockutils [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] Lock "a9274dfc-afbd-419b-a98b-053d71a05d7c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 885.060538] env[69992]: DEBUG oslo_concurrency.lockutils [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] Lock "a9274dfc-afbd-419b-a98b-053d71a05d7c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.060808] env[69992]: DEBUG nova.compute.manager [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] No waiting events found dispatching network-vif-plugged-8b0b0a96-e1ab-4c92-b8d0-af130d30c696 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 885.060940] env[69992]: WARNING nova.compute.manager [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Received unexpected event network-vif-plugged-8b0b0a96-e1ab-4c92-b8d0-af130d30c696 for instance with vm_state building and task_state spawning. [ 885.061154] env[69992]: DEBUG nova.compute.manager [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Received event network-changed-8b0b0a96-e1ab-4c92-b8d0-af130d30c696 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 885.061339] env[69992]: DEBUG nova.compute.manager [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Refreshing instance network info cache due to event network-changed-8b0b0a96-e1ab-4c92-b8d0-af130d30c696. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 885.061600] env[69992]: DEBUG oslo_concurrency.lockutils [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] Acquiring lock "refresh_cache-a9274dfc-afbd-419b-a98b-053d71a05d7c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.061775] env[69992]: DEBUG oslo_concurrency.lockutils [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] Acquired lock "refresh_cache-a9274dfc-afbd-419b-a98b-053d71a05d7c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.061943] env[69992]: DEBUG nova.network.neutron [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Refreshing network info cache for port 8b0b0a96-e1ab-4c92-b8d0-af130d30c696 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 885.083663] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a188f4-fba5-5023-04e7-e7a2ef172b01, 'name': SearchDatastore_Task, 'duration_secs': 0.014777} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.084715] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.086195] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 885.086195] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.086195] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.086195] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 
tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 885.086195] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-22566e4e-e2be-4b44-a86c-d44f765016e6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.104835] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 885.104835] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-99d9221e-f748-4600-8dbb-779b8871c386 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.105094] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 885.105156] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 885.106244] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bebf975f-1167-47be-8711-ed61f43d66a2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.115074] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for the task: (returnval){ [ 885.115074] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52c8e846-ee36-a5f3-df75-cb8f03094da9" [ 885.115074] env[69992]: _type = "Task" [ 885.115074] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.124809] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c8e846-ee36-a5f3-df75-cb8f03094da9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.226151] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896685, 'name': Rename_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.302515] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 885.385821] env[69992]: DEBUG nova.compute.manager [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 885.400454] env[69992]: DEBUG oslo_vmware.api [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Task: {'id': task-2896686, 'name': PowerOffVM_Task, 'duration_secs': 0.235979} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.400534] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 885.400996] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 885.401440] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a80e574-c3a2-4f95-b2ff-9632340d6c8a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.413833] env[69992]: DEBUG nova.virt.hardware [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 885.414443] env[69992]: DEBUG nova.virt.hardware [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] 
Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 885.414728] env[69992]: DEBUG nova.virt.hardware [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 885.414989] env[69992]: DEBUG nova.virt.hardware [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 885.415220] env[69992]: DEBUG nova.virt.hardware [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 885.415485] env[69992]: DEBUG nova.virt.hardware [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 885.415820] env[69992]: DEBUG nova.virt.hardware [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 885.420635] env[69992]: DEBUG nova.virt.hardware [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 885.420635] env[69992]: DEBUG nova.virt.hardware [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 885.420635] env[69992]: DEBUG nova.virt.hardware [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 885.420635] env[69992]: DEBUG nova.virt.hardware [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 885.420635] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f60381-b79d-4a9a-b0d3-6c720b1942c2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.432081] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-259c9235-75c2-4c9e-85ea-8654e0d9d139 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
885.477944] env[69992]: DEBUG nova.scheduler.client.report [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 885.490634] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 885.490634] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 885.490783] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Deleting the datastore file [datastore2] 6c58c05e-9679-4e53-89e7-c7c9cb11cff0 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 885.490989] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2417928a-336b-4364-877d-ba4a0734f6ab {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.505180] env[69992]: DEBUG oslo_vmware.api [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Waiting for the task: (returnval){ [ 885.505180] env[69992]: value = "task-2896689" [ 885.505180] env[69992]: _type = "Task" [ 885.505180] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.521710] env[69992]: DEBUG oslo_vmware.api [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Task: {'id': task-2896689, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.639685] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c8e846-ee36-a5f3-df75-cb8f03094da9, 'name': SearchDatastore_Task, 'duration_secs': 0.030325} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.641986] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d86b67f-8f58-44c6-96c6-d7fbe767a664 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.655965] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for the task: (returnval){ [ 885.655965] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52f1d7ca-c90c-3b10-af74-1f5755dca673" [ 885.655965] env[69992]: _type = "Task" [ 885.655965] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.657630] env[69992]: DEBUG nova.network.neutron [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 885.675377] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52f1d7ca-c90c-3b10-af74-1f5755dca673, 'name': SearchDatastore_Task, 'duration_secs': 0.013549} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.675952] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.676307] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] a9274dfc-afbd-419b-a98b-053d71a05d7c/a9274dfc-afbd-419b-a98b-053d71a05d7c.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 885.676671] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee4e07e8-54de-4607-8c20-38bd6cf4491f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.690799] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for the task: (returnval){ [ 885.690799] env[69992]: value = "task-2896690" [ 885.690799] env[69992]: _type = "Task" [ 885.690799] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.699739] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2896690, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.730080] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896685, 'name': Rename_Task, 'duration_secs': 1.259564} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.730685] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 885.730978] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5322740b-2328-4e1f-8eba-13bc49380de0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.740944] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Waiting for the task: (returnval){ [ 885.740944] env[69992]: value = "task-2896691" [ 885.740944] env[69992]: _type = "Task" [ 885.740944] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.753532] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896691, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.987959] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.636s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.989653] env[69992]: DEBUG nova.compute.manager [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 885.994432] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.239s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 885.994432] env[69992]: DEBUG nova.objects.instance [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] Lazy-loading 'resources' on Instance uuid 93b78a8b-389c-4114-8c1d-da80146d80f3 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 886.025025] env[69992]: DEBUG oslo_vmware.api [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Task: {'id': task-2896689, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.203873} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.026513] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 886.028916] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 886.028916] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 886.028916] env[69992]: INFO nova.compute.manager [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Took 1.17 seconds to destroy the instance on the hypervisor. [ 886.028916] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 886.028916] env[69992]: DEBUG nova.compute.manager [-] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 886.028916] env[69992]: DEBUG nova.network.neutron [-] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 886.204391] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2896690, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.263208] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896691, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.362053] env[69992]: DEBUG nova.network.neutron [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Updating instance_info_cache with network_info: [{"id": "b35f3c6b-88f2-436b-994c-ba4ce4ad2662", "address": "fa:16:3e:c0:ec:2a", "network": {"id": "6427034b-cb68-41ff-8426-d7ce876af837", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-938441390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3fc8205ec2e14fdba28998521b552a69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb35f3c6b-88", "ovs_interfaceid": "b35f3c6b-88f2-436b-994c-ba4ce4ad2662", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.446422] env[69992]: DEBUG nova.network.neutron [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Updated VIF entry in instance network info cache for port 8b0b0a96-e1ab-4c92-b8d0-af130d30c696. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 886.447306] env[69992]: DEBUG nova.network.neutron [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Updating instance_info_cache with network_info: [{"id": "8b0b0a96-e1ab-4c92-b8d0-af130d30c696", "address": "fa:16:3e:d2:ca:cd", "network": {"id": "b6ad7b53-72db-475c-a28d-2b0c8da19818", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1429555593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea0f9171bc5c4034b8dbe9100bd6e007", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b0b0a96-e1", "ovs_interfaceid": "8b0b0a96-e1ab-4c92-b8d0-af130d30c696", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.505553] env[69992]: DEBUG nova.compute.utils [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 886.509812] env[69992]: DEBUG nova.compute.manager [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 886.509812] env[69992]: DEBUG nova.network.neutron [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 886.555096] env[69992]: DEBUG nova.compute.manager [req-59b78ea2-761c-4f30-b3f9-86cdc8a1a1ba req-2c33be5f-955c-49c0-90d6-7fb5906fb3ca service nova] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Received event network-vif-deleted-bb164768-c900-42bd-819e-eb523bfc2d54 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 886.555365] env[69992]: DEBUG nova.compute.manager [req-59b78ea2-761c-4f30-b3f9-86cdc8a1a1ba req-2c33be5f-955c-49c0-90d6-7fb5906fb3ca service nova] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Received event network-vif-plugged-b35f3c6b-88f2-436b-994c-ba4ce4ad2662 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 886.555662] env[69992]: DEBUG oslo_concurrency.lockutils [req-59b78ea2-761c-4f30-b3f9-86cdc8a1a1ba req-2c33be5f-955c-49c0-90d6-7fb5906fb3ca service nova] Acquiring lock "27580836-7ab5-4e64-a985-3e6fc22a8b77-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.555891] env[69992]: DEBUG oslo_concurrency.lockutils [req-59b78ea2-761c-4f30-b3f9-86cdc8a1a1ba req-2c33be5f-955c-49c0-90d6-7fb5906fb3ca service nova] Lock "27580836-7ab5-4e64-a985-3e6fc22a8b77-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.556094] env[69992]: DEBUG oslo_concurrency.lockutils [req-59b78ea2-761c-4f30-b3f9-86cdc8a1a1ba req-2c33be5f-955c-49c0-90d6-7fb5906fb3ca service nova] Lock "27580836-7ab5-4e64-a985-3e6fc22a8b77-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.556313] env[69992]: DEBUG nova.compute.manager [req-59b78ea2-761c-4f30-b3f9-86cdc8a1a1ba req-2c33be5f-955c-49c0-90d6-7fb5906fb3ca service nova] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] No waiting events found dispatching network-vif-plugged-b35f3c6b-88f2-436b-994c-ba4ce4ad2662 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 886.556518] env[69992]: WARNING nova.compute.manager [req-59b78ea2-761c-4f30-b3f9-86cdc8a1a1ba req-2c33be5f-955c-49c0-90d6-7fb5906fb3ca service nova] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Received unexpected event network-vif-plugged-b35f3c6b-88f2-436b-994c-ba4ce4ad2662 for instance with vm_state building and task_state spawning. 
[ 886.556666] env[69992]: DEBUG nova.compute.manager [req-59b78ea2-761c-4f30-b3f9-86cdc8a1a1ba req-2c33be5f-955c-49c0-90d6-7fb5906fb3ca service nova] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Received event network-changed-b35f3c6b-88f2-436b-994c-ba4ce4ad2662 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 886.556856] env[69992]: DEBUG nova.compute.manager [req-59b78ea2-761c-4f30-b3f9-86cdc8a1a1ba req-2c33be5f-955c-49c0-90d6-7fb5906fb3ca service nova] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Refreshing instance network info cache due to event network-changed-b35f3c6b-88f2-436b-994c-ba4ce4ad2662. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 886.557052] env[69992]: DEBUG oslo_concurrency.lockutils [req-59b78ea2-761c-4f30-b3f9-86cdc8a1a1ba req-2c33be5f-955c-49c0-90d6-7fb5906fb3ca service nova] Acquiring lock "refresh_cache-27580836-7ab5-4e64-a985-3e6fc22a8b77" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.639861] env[69992]: DEBUG nova.policy [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bfb2b2303d6448da9043701c396a2b4c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '02824f4021a5400583cf13cd553207fa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 886.705813] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2896690, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.679714} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.707269] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] a9274dfc-afbd-419b-a98b-053d71a05d7c/a9274dfc-afbd-419b-a98b-053d71a05d7c.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 886.707520] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 886.707873] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8acb0462-5a12-4140-baa5-e1d0d968559d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.719719] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for the task: (returnval){ [ 886.719719] env[69992]: value = "task-2896692" [ 886.719719] env[69992]: _type = "Task" [ 886.719719] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.733290] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2896692, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.754887] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896691, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.863341] env[69992]: DEBUG oslo_concurrency.lockutils [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Releasing lock "refresh_cache-27580836-7ab5-4e64-a985-3e6fc22a8b77" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.863661] env[69992]: DEBUG nova.compute.manager [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Instance network_info: |[{"id": "b35f3c6b-88f2-436b-994c-ba4ce4ad2662", "address": "fa:16:3e:c0:ec:2a", "network": {"id": "6427034b-cb68-41ff-8426-d7ce876af837", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-938441390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3fc8205ec2e14fdba28998521b552a69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb35f3c6b-88", "ovs_interfaceid": "b35f3c6b-88f2-436b-994c-ba4ce4ad2662", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 886.863952] env[69992]: DEBUG oslo_concurrency.lockutils [req-59b78ea2-761c-4f30-b3f9-86cdc8a1a1ba req-2c33be5f-955c-49c0-90d6-7fb5906fb3ca service nova] Acquired lock "refresh_cache-27580836-7ab5-4e64-a985-3e6fc22a8b77" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 886.864159] env[69992]: DEBUG nova.network.neutron [req-59b78ea2-761c-4f30-b3f9-86cdc8a1a1ba req-2c33be5f-955c-49c0-90d6-7fb5906fb3ca service nova] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Refreshing network info cache for port b35f3c6b-88f2-436b-994c-ba4ce4ad2662 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 886.865526] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:ec:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b35f3c6b-88f2-436b-994c-ba4ce4ad2662', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 886.874092] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Creating folder: Project 
(3fc8205ec2e14fdba28998521b552a69). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 886.878508] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1601f04d-1472-4ee6-bce3-6f3ba3cd7de2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.892977] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Created folder: Project (3fc8205ec2e14fdba28998521b552a69) in parent group-v581821. [ 886.893209] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Creating folder: Instances. Parent ref: group-v581869. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 886.893547] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c24e5ea2-5bc1-43ea-9c42-b36f8dc7062d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.904414] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Created folder: Instances in parent group-v581869. [ 886.904688] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 886.907523] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 886.907965] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6351a772-79cf-4a22-a4cc-b5d03a81bedd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.940337] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 886.940337] env[69992]: value = "task-2896695" [ 886.940337] env[69992]: _type = "Task" [ 886.940337] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.948496] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b996aaf0-92b0-4a55-99cd-748f1f102c0c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.954417] env[69992]: DEBUG oslo_concurrency.lockutils [req-3158e17a-83b8-43fb-bb3f-4809f8d1a821 req-1805c077-d70f-4e33-997d-38b191d0bc00 service nova] Releasing lock "refresh_cache-a9274dfc-afbd-419b-a98b-053d71a05d7c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.963182] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896695, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.963182] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20af0694-15a7-42dc-93bb-3ca8be69f35b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.997101] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab037c98-f078-48d6-9481-af914cb476c9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.006159] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee17c23-854d-4178-a6eb-b65f8a5355bb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.011569] env[69992]: DEBUG nova.compute.manager [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 887.027080] env[69992]: DEBUG nova.compute.provider_tree [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 887.234394] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2896692, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085393} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.234759] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 887.235647] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f8dcb7d-c9ed-4344-a430-5bd81003374a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.269060] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Reconfiguring VM instance instance-0000000f to attach disk [datastore2] a9274dfc-afbd-419b-a98b-053d71a05d7c/a9274dfc-afbd-419b-a98b-053d71a05d7c.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 887.271420] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6886c6e-7f62-4db7-ba96-7eb45870303b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.303240] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896691, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.304828] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for the task: (returnval){ [ 887.304828] env[69992]: value = "task-2896696" [ 887.304828] env[69992]: _type = "Task" [ 887.304828] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.319193] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2896696, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.454855] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896695, 'name': CreateVM_Task, 'duration_secs': 0.417027} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.455374] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 887.458639] env[69992]: DEBUG oslo_concurrency.lockutils [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.458639] env[69992]: DEBUG oslo_concurrency.lockutils [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 887.458639] env[69992]: DEBUG oslo_concurrency.lockutils [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 887.461391] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca7dfe99-0311-450f-9582-8a652de58f35 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.468060] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 887.468060] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]521fb592-034e-f29f-53fc-924c923f5e94" [ 887.468060] env[69992]: _type = "Task" [ 887.468060] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.480798] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521fb592-034e-f29f-53fc-924c923f5e94, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.502864] env[69992]: DEBUG nova.network.neutron [-] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.529748] env[69992]: DEBUG nova.scheduler.client.report [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 887.540857] env[69992]: DEBUG nova.network.neutron [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Successfully created port: 617fc6d5-b33e-407b-8a59-8a6def94c1f4 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 887.770738] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896691, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.820374] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2896696, 'name': ReconfigVM_Task, 'duration_secs': 0.405102} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.820828] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Reconfigured VM instance instance-0000000f to attach disk [datastore2] a9274dfc-afbd-419b-a98b-053d71a05d7c/a9274dfc-afbd-419b-a98b-053d71a05d7c.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 887.821860] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-18b4c43d-2eaf-4584-8ba8-84a9a827b8e2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.832133] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for the task: (returnval){ [ 887.832133] env[69992]: value = "task-2896697" [ 887.832133] env[69992]: _type = "Task" [ 887.832133] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.843482] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2896697, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.984758] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521fb592-034e-f29f-53fc-924c923f5e94, 'name': SearchDatastore_Task, 'duration_secs': 0.017511} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.985204] env[69992]: DEBUG oslo_concurrency.lockutils [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 887.985959] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 887.985959] env[69992]: DEBUG oslo_concurrency.lockutils [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.985959] env[69992]: DEBUG oslo_concurrency.lockutils [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 887.987786] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 887.988356] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42715e1d-33ce-4857-b7f6-dc05b42741bd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.000558] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 888.000730] 
env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 888.001730] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6555670-9f07-4871-9b90-f2b46d8d1034 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.008478] env[69992]: INFO nova.compute.manager [-] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Took 1.98 seconds to deallocate network for instance. [ 888.016746] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 888.016746] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52b7e4d7-8dcb-ade6-f422-1eb834037191" [ 888.016746] env[69992]: _type = "Task" [ 888.016746] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.025424] env[69992]: DEBUG nova.compute.manager [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 888.039398] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b7e4d7-8dcb-ade6-f422-1eb834037191, 'name': SearchDatastore_Task, 'duration_secs': 0.016153} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.040183] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.047s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.045963] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.160s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.045963] env[69992]: INFO nova.compute.claims [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 888.047988] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28f8ccd5-0232-47ff-aa6b-605aa2fa75a1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.056661] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 888.056661] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]526978b9-e39e-9535-a635-b489093581a6" [ 888.056661] env[69992]: _type = "Task" [ 888.056661] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.063297] env[69992]: DEBUG nova.virt.hardware [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 888.063297] env[69992]: DEBUG nova.virt.hardware [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 888.063297] env[69992]: DEBUG nova.virt.hardware [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 888.063297] env[69992]: DEBUG nova.virt.hardware [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 888.063547] env[69992]: DEBUG nova.virt.hardware [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 888.063547] env[69992]: DEBUG nova.virt.hardware [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 888.063649] env[69992]: DEBUG nova.virt.hardware [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 888.063820] env[69992]: DEBUG nova.virt.hardware [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 888.063982] env[69992]: DEBUG nova.virt.hardware [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 
tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 888.064553] env[69992]: DEBUG nova.virt.hardware [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 888.064553] env[69992]: DEBUG nova.virt.hardware [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 888.065407] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e213bd36-afa2-4214-a9bd-0e9c38d18670 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.074164] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526978b9-e39e-9535-a635-b489093581a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.076125] env[69992]: INFO nova.scheduler.client.report [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] Deleted allocations for instance 93b78a8b-389c-4114-8c1d-da80146d80f3 [ 888.084830] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed7f7587-d284-4e30-aedc-535a29c73d15 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.207908] env[69992]: DEBUG nova.network.neutron [req-59b78ea2-761c-4f30-b3f9-86cdc8a1a1ba req-2c33be5f-955c-49c0-90d6-7fb5906fb3ca service nova] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Updated VIF entry in instance network info cache for port b35f3c6b-88f2-436b-994c-ba4ce4ad2662. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 888.208505] env[69992]: DEBUG nova.network.neutron [req-59b78ea2-761c-4f30-b3f9-86cdc8a1a1ba req-2c33be5f-955c-49c0-90d6-7fb5906fb3ca service nova] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Updating instance_info_cache with network_info: [{"id": "b35f3c6b-88f2-436b-994c-ba4ce4ad2662", "address": "fa:16:3e:c0:ec:2a", "network": {"id": "6427034b-cb68-41ff-8426-d7ce876af837", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-938441390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3fc8205ec2e14fdba28998521b552a69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb35f3c6b-88", "ovs_interfaceid": "b35f3c6b-88f2-436b-994c-ba4ce4ad2662", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.244867] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "98cd0eb8-d17a-4a9b-a172-1ba1207168d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.245251] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "98cd0eb8-d17a-4a9b-a172-1ba1207168d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.271282] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896691, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.343888] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2896697, 'name': Rename_Task, 'duration_secs': 0.198439} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.345658] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 888.345761] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4685fc11-d33e-4aa6-9e93-398c4d7be6ec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.356948] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for the task: (returnval){ [ 888.356948] env[69992]: value = "task-2896698" [ 888.356948] env[69992]: _type = "Task" [ 888.356948] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.367492] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2896698, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.437833] env[69992]: DEBUG nova.compute.manager [req-661c3eab-557c-430a-a38e-31f121826c78 req-4d94f800-d85a-415d-9a3a-2cbc7b0f4656 service nova] [instance: eba81db1-973c-4981-baca-cb98e4087510] Received event network-changed-54980674-0d82-4eac-8cb8-3d49bf81e6f0 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 888.438366] env[69992]: DEBUG nova.compute.manager [req-661c3eab-557c-430a-a38e-31f121826c78 req-4d94f800-d85a-415d-9a3a-2cbc7b0f4656 service nova] [instance: eba81db1-973c-4981-baca-cb98e4087510] Refreshing instance network info cache due to event network-changed-54980674-0d82-4eac-8cb8-3d49bf81e6f0. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 888.438366] env[69992]: DEBUG oslo_concurrency.lockutils [req-661c3eab-557c-430a-a38e-31f121826c78 req-4d94f800-d85a-415d-9a3a-2cbc7b0f4656 service nova] Acquiring lock "refresh_cache-eba81db1-973c-4981-baca-cb98e4087510" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.438482] env[69992]: DEBUG oslo_concurrency.lockutils [req-661c3eab-557c-430a-a38e-31f121826c78 req-4d94f800-d85a-415d-9a3a-2cbc7b0f4656 service nova] Acquired lock "refresh_cache-eba81db1-973c-4981-baca-cb98e4087510" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 888.438959] env[69992]: DEBUG nova.network.neutron [req-661c3eab-557c-430a-a38e-31f121826c78 req-4d94f800-d85a-415d-9a3a-2cbc7b0f4656 service nova] [instance: eba81db1-973c-4981-baca-cb98e4087510] Refreshing network info cache for port 54980674-0d82-4eac-8cb8-3d49bf81e6f0 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 888.525509] env[69992]: DEBUG oslo_concurrency.lockutils [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.570572] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526978b9-e39e-9535-a635-b489093581a6, 'name': SearchDatastore_Task, 'duration_secs': 0.016242} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.571622] env[69992]: DEBUG oslo_concurrency.lockutils [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 888.571882] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 27580836-7ab5-4e64-a985-3e6fc22a8b77/27580836-7ab5-4e64-a985-3e6fc22a8b77.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 888.572433] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0946bce0-1ce7-4b36-b4d1-df8a226ded43 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.582704] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 888.582704] env[69992]: value = "task-2896699" [ 888.582704] env[69992]: _type = "Task" [ 888.582704] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.589859] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a575f903-1155-4a1c-928f-2adec3bed2c3 tempest-DeleteServersAdminTestJSON-1743919994 tempest-DeleteServersAdminTestJSON-1743919994-project-admin] Lock "93b78a8b-389c-4114-8c1d-da80146d80f3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.418s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.602840] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896699, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.712084] env[69992]: DEBUG oslo_concurrency.lockutils [req-59b78ea2-761c-4f30-b3f9-86cdc8a1a1ba req-2c33be5f-955c-49c0-90d6-7fb5906fb3ca service nova] Releasing lock "refresh_cache-27580836-7ab5-4e64-a985-3e6fc22a8b77" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 888.775861] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896691, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.866740] env[69992]: DEBUG nova.network.neutron [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Successfully updated port: 73053414-72bf-473f-8a22-4e100e8ced17 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 888.876575] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2896698, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.099565] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896699, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.283551] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896691, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.369406] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "refresh_cache-9bab6bf7-43c8-4cc3-b484-4472f1acdf45" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.373857] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquired lock "refresh_cache-9bab6bf7-43c8-4cc3-b484-4472f1acdf45" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.373857] env[69992]: DEBUG nova.network.neutron [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 889.378156] env[69992]: DEBUG oslo_vmware.api [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2896698, 'name': PowerOnVM_Task, 'duration_secs': 0.993247} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.378156] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 889.378472] env[69992]: INFO nova.compute.manager [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Took 9.32 seconds to spawn the instance on the hypervisor. 
[ 889.378472] env[69992]: DEBUG nova.compute.manager [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 889.383272] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf82d769-dd75-4149-8ad0-e6d44ea71ddd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.565957] env[69992]: DEBUG nova.compute.manager [req-f4f1b269-a174-4510-9169-ff6be898cc05 req-66d7831d-3b68-4e28-8063-a273adc94de3 service nova] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Received event network-vif-deleted-ac8aea81-104c-4dc6-a761-379a3d5a7b2d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 889.594641] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896699, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.66243} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.594884] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 27580836-7ab5-4e64-a985-3e6fc22a8b77/27580836-7ab5-4e64-a985-3e6fc22a8b77.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 889.595109] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 889.595389] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-11cc431f-8124-4b30-a7a5-531ec62d5535 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.605589] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 889.605589] env[69992]: value = "task-2896700" [ 889.605589] env[69992]: _type = "Task" [ 889.605589] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.624060] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896700, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.689648] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f801942-513f-4cb7-b45d-90af81d21a7f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.698857] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a711ea78-9d4e-4477-ad00-39e0550941fb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.753253] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3001a9d-a4c9-4e30-a7cc-9a146036cb37 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.761905] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28251da2-f71e-467e-a4f6-6fbc03339411 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.785018] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896691, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.785018] env[69992]: DEBUG nova.compute.provider_tree [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 889.855673] env[69992]: DEBUG nova.network.neutron [req-661c3eab-557c-430a-a38e-31f121826c78 req-4d94f800-d85a-415d-9a3a-2cbc7b0f4656 service nova] [instance: eba81db1-973c-4981-baca-cb98e4087510] Updated VIF entry in instance network info cache for port 54980674-0d82-4eac-8cb8-3d49bf81e6f0. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 889.856089] env[69992]: DEBUG nova.network.neutron [req-661c3eab-557c-430a-a38e-31f121826c78 req-4d94f800-d85a-415d-9a3a-2cbc7b0f4656 service nova] [instance: eba81db1-973c-4981-baca-cb98e4087510] Updating instance_info_cache with network_info: [{"id": "54980674-0d82-4eac-8cb8-3d49bf81e6f0", "address": "fa:16:3e:d4:0a:13", "network": {"id": "654372b0-0166-4b14-b821-c42c0ad0568b", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1742099554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96a05199dc1445dcb7b42b7feb26ed2d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54980674-0d", "ovs_interfaceid": "54980674-0d82-4eac-8cb8-3d49bf81e6f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.905625] env[69992]: INFO nova.compute.manager [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Took 20.95 seconds to build instance. [ 889.971936] env[69992]: DEBUG oslo_concurrency.lockutils [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "7fbab19d-5a0a-4da3-b078-40ca0eaf8c97" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.972882] env[69992]: DEBUG oslo_concurrency.lockutils [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "7fbab19d-5a0a-4da3-b078-40ca0eaf8c97" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.016189] env[69992]: DEBUG nova.network.neutron [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 890.124799] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896700, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.189204} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.125512] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 890.126086] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5894639-5043-4dda-a96d-d88602001fa8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.157411] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] 27580836-7ab5-4e64-a985-3e6fc22a8b77/27580836-7ab5-4e64-a985-3e6fc22a8b77.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 890.157761] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6fd36af-5237-49c6-9f2a-43c216f48464 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.181421] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 890.181421] env[69992]: value = "task-2896701" [ 890.181421] env[69992]: _type = "Task" [ 890.181421] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.193188] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896701, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.281385] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896691, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.292963] env[69992]: DEBUG nova.scheduler.client.report [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 890.360650] env[69992]: DEBUG oslo_concurrency.lockutils [req-661c3eab-557c-430a-a38e-31f121826c78 req-4d94f800-d85a-415d-9a3a-2cbc7b0f4656 service nova] Releasing lock "refresh_cache-eba81db1-973c-4981-baca-cb98e4087510" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 890.361972] env[69992]: DEBUG nova.network.neutron [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Successfully updated port: 617fc6d5-b33e-407b-8a59-8a6def94c1f4 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 890.407529] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6feb4672-ac16-477e-92eb-8480ec9475d6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Lock "a9274dfc-afbd-419b-a98b-053d71a05d7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.548s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.694419] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896701, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.779686] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896691, 'name': PowerOnVM_Task} progress is 90%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.804928] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.761s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.805473] env[69992]: DEBUG nova.compute.manager [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 890.810964] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.526s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.815632] env[69992]: INFO nova.compute.claims [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 890.866074] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "refresh_cache-27492ef7-8258-4001-b3b3-5bcb94e12c1f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.867578] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquired lock "refresh_cache-27492ef7-8258-4001-b3b3-5bcb94e12c1f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.867578] env[69992]: DEBUG nova.network.neutron [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 890.910945] env[69992]: DEBUG nova.compute.manager [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 891.011997] env[69992]: DEBUG nova.network.neutron [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Updating instance_info_cache with network_info: [{"id": "73053414-72bf-473f-8a22-4e100e8ced17", "address": "fa:16:3e:20:df:42", "network": {"id": "bea180e9-720e-4be5-bb1d-8aa1243cfe3f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-67313604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "658cab8ee4194f7f98dd07de450f248b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73053414-72", "ovs_interfaceid": "73053414-72bf-473f-8a22-4e100e8ced17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.194733] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896701, 'name': ReconfigVM_Task, 'duration_secs': 0.780133} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.195340] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Reconfigured VM instance instance-00000010 to attach disk [datastore1] 27580836-7ab5-4e64-a985-3e6fc22a8b77/27580836-7ab5-4e64-a985-3e6fc22a8b77.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 891.196084] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2093505c-5b8c-4fc8-a1c6-e360958cac30 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.206223] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 891.206223] env[69992]: value = "task-2896702" [ 891.206223] env[69992]: _type = "Task" [ 891.206223] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.219535] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896702, 'name': Rename_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.284519] env[69992]: DEBUG oslo_vmware.api [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896691, 'name': PowerOnVM_Task, 'duration_secs': 5.209295} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.284869] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 891.284968] env[69992]: INFO nova.compute.manager [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Took 15.69 seconds to spawn the instance on the hypervisor. [ 891.285490] env[69992]: DEBUG nova.compute.manager [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 891.286501] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe9550aa-87e9-4519-8191-6928931e3432 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.319941] env[69992]: DEBUG nova.compute.utils [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 891.324650] env[69992]: DEBUG nova.compute.manager [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 891.324840] env[69992]: DEBUG nova.network.neutron [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 891.426217] env[69992]: DEBUG nova.network.neutron [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 891.457881] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.467662] env[69992]: DEBUG nova.policy [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bfb2b2303d6448da9043701c396a2b4c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '02824f4021a5400583cf13cd553207fa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 891.515506] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Releasing lock "refresh_cache-9bab6bf7-43c8-4cc3-b484-4472f1acdf45" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.515827] env[69992]: DEBUG nova.compute.manager [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Instance network_info: |[{"id": "73053414-72bf-473f-8a22-4e100e8ced17", "address": "fa:16:3e:20:df:42", "network": {"id": "bea180e9-720e-4be5-bb1d-8aa1243cfe3f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-67313604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "658cab8ee4194f7f98dd07de450f248b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73053414-72", "ovs_interfaceid": "73053414-72bf-473f-8a22-4e100e8ced17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 891.516275] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:df:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4406a73e-2189-46ac-9e96-4f0af80b5094', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '73053414-72bf-473f-8a22-4e100e8ced17', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 891.529652] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Creating folder: Project (658cab8ee4194f7f98dd07de450f248b). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 891.531837] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-634e9b2b-d933-4ef2-be9d-309e07f85987 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.547670] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Created folder: Project (658cab8ee4194f7f98dd07de450f248b) in parent group-v581821. [ 891.550351] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Creating folder: Instances. Parent ref: group-v581872. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 891.550351] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2160bad3-e5f2-4642-a1c2-a66a9c8a4c0c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.570343] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Created folder: Instances in parent group-v581872. [ 891.570343] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 891.570343] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 891.570343] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1e3eaffe-dabb-4659-adc9-798d471360d8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.601448] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 891.601448] env[69992]: value = "task-2896705" [ 891.601448] env[69992]: _type = "Task" [ 891.601448] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.616515] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896705, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.717470] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896702, 'name': Rename_Task, 'duration_secs': 0.300134} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.717535] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 891.717894] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bfeb6708-2d30-4bd6-9a7a-c62fbe5c2586 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.728781] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 891.728781] env[69992]: value = "task-2896706" [ 891.728781] env[69992]: _type = "Task" [ 891.728781] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.738550] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896706, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.811869] env[69992]: INFO nova.compute.manager [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Took 27.49 seconds to build instance. [ 891.835042] env[69992]: DEBUG nova.compute.manager [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 891.855503] env[69992]: DEBUG nova.network.neutron [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Updating instance_info_cache with network_info: [{"id": "617fc6d5-b33e-407b-8a59-8a6def94c1f4", "address": "fa:16:3e:8b:85:5e", "network": {"id": "918ab136-b380-4ccd-b218-738aac4652fa", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1944921913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02824f4021a5400583cf13cd553207fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac2c9d07-ed01-47a9-88f1-562992bc1076", "external-id": "nsx-vlan-transportzone-968", "segmentation_id": 968, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap617fc6d5-b3", "ovs_interfaceid": "617fc6d5-b33e-407b-8a59-8a6def94c1f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.116338] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896705, 'name': CreateVM_Task, 'duration_secs': 0.408471} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.119722] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 892.123134] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.123134] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.126576] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 892.126576] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e965404d-2ce0-4e71-8d2d-cf11744fde58 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.137772] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "94a4a16e-926c-47ce-a5a7-0b216b7c5442" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.138917] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "94a4a16e-926c-47ce-a5a7-0b216b7c5442" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.139281] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 892.139281] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5253d983-b8bc-df7d-eb18-062337fa611d" [ 892.139281] env[69992]: _type = "Task" [ 892.139281] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.163375] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5253d983-b8bc-df7d-eb18-062337fa611d, 'name': SearchDatastore_Task, 'duration_secs': 0.017271} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.163731] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.163990] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 892.164276] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.165515] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.165515] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 892.165515] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14031e8d-329e-4827-9878-c7461f502ec4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.181143] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 892.181363] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 892.182731] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fbeae1f-32f8-42f3-9df3-e9f34a25b4bc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.194382] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 892.194382] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5201d325-cecb-62f2-6da3-90dd0d417d92" [ 892.194382] env[69992]: _type = "Task" [ 892.194382] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.208189] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5201d325-cecb-62f2-6da3-90dd0d417d92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.240849] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896706, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.313672] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9edc14fa-b040-486c-8e00-050e196c6322 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Lock "c1d73002-6e69-41a6-95b3-34dccaf872ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.008s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.358117] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Releasing lock "refresh_cache-27492ef7-8258-4001-b3b3-5bcb94e12c1f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.358479] env[69992]: DEBUG nova.compute.manager [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Instance network_info: |[{"id": "617fc6d5-b33e-407b-8a59-8a6def94c1f4", "address": "fa:16:3e:8b:85:5e", "network": {"id": "918ab136-b380-4ccd-b218-738aac4652fa", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1944921913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02824f4021a5400583cf13cd553207fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "ac2c9d07-ed01-47a9-88f1-562992bc1076", "external-id": "nsx-vlan-transportzone-968", "segmentation_id": 968, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap617fc6d5-b3", "ovs_interfaceid": "617fc6d5-b33e-407b-8a59-8a6def94c1f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 892.358880] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:85:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac2c9d07-ed01-47a9-88f1-562992bc1076', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '617fc6d5-b33e-407b-8a59-8a6def94c1f4', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 892.374319] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Creating folder: Project (02824f4021a5400583cf13cd553207fa). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 892.380297] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-25857734-d52a-4de9-a1ab-eab1550b8dab {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.395209] env[69992]: DEBUG nova.compute.manager [req-7bbc13f5-00a8-4145-b276-527bc2f3600d req-7d8b485b-5310-41cc-937f-94c213612a52 service nova] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Received event network-vif-plugged-73053414-72bf-473f-8a22-4e100e8ced17 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 892.395535] env[69992]: DEBUG oslo_concurrency.lockutils [req-7bbc13f5-00a8-4145-b276-527bc2f3600d req-7d8b485b-5310-41cc-937f-94c213612a52 service nova] Acquiring lock "9bab6bf7-43c8-4cc3-b484-4472f1acdf45-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.395782] env[69992]: DEBUG oslo_concurrency.lockutils [req-7bbc13f5-00a8-4145-b276-527bc2f3600d req-7d8b485b-5310-41cc-937f-94c213612a52 service nova] Lock "9bab6bf7-43c8-4cc3-b484-4472f1acdf45-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.395985] env[69992]: DEBUG oslo_concurrency.lockutils [req-7bbc13f5-00a8-4145-b276-527bc2f3600d req-7d8b485b-5310-41cc-937f-94c213612a52 service nova] Lock "9bab6bf7-43c8-4cc3-b484-4472f1acdf45-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.397243] env[69992]: DEBUG nova.compute.manager [req-7bbc13f5-00a8-4145-b276-527bc2f3600d req-7d8b485b-5310-41cc-937f-94c213612a52 service nova] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] No 
waiting events found dispatching network-vif-plugged-73053414-72bf-473f-8a22-4e100e8ced17 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 892.397431] env[69992]: WARNING nova.compute.manager [req-7bbc13f5-00a8-4145-b276-527bc2f3600d req-7d8b485b-5310-41cc-937f-94c213612a52 service nova] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Received unexpected event network-vif-plugged-73053414-72bf-473f-8a22-4e100e8ced17 for instance with vm_state building and task_state spawning. [ 892.397918] env[69992]: DEBUG nova.compute.manager [req-7bbc13f5-00a8-4145-b276-527bc2f3600d req-7d8b485b-5310-41cc-937f-94c213612a52 service nova] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Received event network-changed-73053414-72bf-473f-8a22-4e100e8ced17 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 892.397918] env[69992]: DEBUG nova.compute.manager [req-7bbc13f5-00a8-4145-b276-527bc2f3600d req-7d8b485b-5310-41cc-937f-94c213612a52 service nova] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Refreshing instance network info cache due to event network-changed-73053414-72bf-473f-8a22-4e100e8ced17. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 892.398052] env[69992]: DEBUG oslo_concurrency.lockutils [req-7bbc13f5-00a8-4145-b276-527bc2f3600d req-7d8b485b-5310-41cc-937f-94c213612a52 service nova] Acquiring lock "refresh_cache-9bab6bf7-43c8-4cc3-b484-4472f1acdf45" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.398188] env[69992]: DEBUG oslo_concurrency.lockutils [req-7bbc13f5-00a8-4145-b276-527bc2f3600d req-7d8b485b-5310-41cc-937f-94c213612a52 service nova] Acquired lock "refresh_cache-9bab6bf7-43c8-4cc3-b484-4472f1acdf45" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.398404] env[69992]: DEBUG nova.network.neutron [req-7bbc13f5-00a8-4145-b276-527bc2f3600d req-7d8b485b-5310-41cc-937f-94c213612a52 service nova] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Refreshing network info cache for port 73053414-72bf-473f-8a22-4e100e8ced17 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 892.410695] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Created folder: Project (02824f4021a5400583cf13cd553207fa) in parent group-v581821. [ 892.410887] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Creating folder: Instances. Parent ref: group-v581875. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 892.413414] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-19b29158-312c-4b4e-ab6f-cf6fef47398e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.431728] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Created folder: Instances in parent group-v581875. 
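[editor's note] Throughout this section the oslo_concurrency entries record how long each caller waited for and then held a named lock (the refresh_cache-&lt;uuid&gt;, compute_resources, &lt;uuid&gt;-events and datastore image-cache locks above). The sketch below shows the two lockutils primitives behind those messages; the lock names used here are placeholders, not Nova's actual ones.

```python
# Minimal illustration of the locking behind the "Acquiring lock ...",
# "... acquired ... :: waited Ns" and '"released" ... :: held Ns' entries.
# Lock names below are placeholders, not Nova's real lock names.
from oslo_concurrency import lockutils

# Decorator form: callers sharing the lock name are serialized, and the
# wrapper logs acquire/release with wait/hold times at DEBUG level, which
# is exactly what the lockutils.py lines in this log show.
@lockutils.synchronized('refresh_cache-00000000-0000-0000-0000-000000000000')
def refresh_instance_network_cache():
    pass  # rebuild the instance network info cache under the lock

# Context-manager form, for short critical sections such as a resource claim.
with lockutils.lock('compute_resources'):
    pass  # claim host resources while the lock is held

refresh_instance_network_cache()
```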
[ 892.431952] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 892.435903] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 892.438771] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-543ac734-87f4-4a7d-99d6-834cf5bb7764 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.474084] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 892.474084] env[69992]: value = "task-2896709" [ 892.474084] env[69992]: _type = "Task" [ 892.474084] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.484658] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896709, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.521968] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8549629-a79f-41cc-960b-c4cd720bfcdb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.533211] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dce2d43-8dc9-4150-a2b3-c5557ef7c470 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.574462] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06bb6a8-0416-429d-baea-63120315eed7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.584355] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0451fea4-52ef-4142-ac78-b452747b4872 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.605393] env[69992]: DEBUG nova.compute.provider_tree [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 892.709745] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5201d325-cecb-62f2-6da3-90dd0d417d92, 'name': SearchDatastore_Task, 'duration_secs': 0.015366} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.710600] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-366dec8a-2f92-4c95-a631-e841de3e0a5c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.718498] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 892.718498] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5265e6bd-199a-01a3-b071-50dcf275e1be" [ 892.718498] env[69992]: _type = "Task" [ 892.718498] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.730291] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5265e6bd-199a-01a3-b071-50dcf275e1be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.738857] env[69992]: DEBUG nova.network.neutron [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Successfully created port: 11df5954-2f09-4c8f-bab8-a5b6740bd994 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 892.741902] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Acquiring lock "c205f559-7fe6-4d7e-beba-2fc96b89d705" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.742149] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Lock "c205f559-7fe6-4d7e-beba-2fc96b89d705" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.748029] env[69992]: DEBUG oslo_vmware.api [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896706, 'name': PowerOnVM_Task, 'duration_secs': 0.962862} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.748206] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 892.748438] env[69992]: INFO nova.compute.manager [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Took 10.04 seconds to spawn the instance on the hypervisor. [ 892.748625] env[69992]: DEBUG nova.compute.manager [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 892.750044] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e46fc1d-6d33-41eb-9ee0-cf087660f0d1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.819773] env[69992]: DEBUG nova.compute.manager [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 892.845217] env[69992]: DEBUG nova.compute.manager [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 892.883842] env[69992]: DEBUG nova.virt.hardware [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 892.883842] env[69992]: DEBUG nova.virt.hardware [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 892.883842] env[69992]: DEBUG nova.virt.hardware [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 892.883842] env[69992]: DEBUG nova.virt.hardware [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 892.884168] env[69992]: DEBUG nova.virt.hardware [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 892.884168] env[69992]: DEBUG nova.virt.hardware [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 892.884660] env[69992]: DEBUG nova.virt.hardware [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 892.884847] env[69992]: DEBUG nova.virt.hardware [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 892.885052] env[69992]: DEBUG nova.virt.hardware [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 
tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 892.885227] env[69992]: DEBUG nova.virt.hardware [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 892.885394] env[69992]: DEBUG nova.virt.hardware [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 892.886947] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c612ad6d-cce2-4062-980a-ee1cfdf89c3a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.900865] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db740114-3516-4e98-9dd9-d92f4ea7a6cf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.989519] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896709, 'name': CreateVM_Task, 'duration_secs': 0.383129} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.989519] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 892.989519] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.989519] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.989519] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 892.989519] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42a25d3e-d124-4593-8bab-97a6e846eff7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.996451] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 
tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 892.996451] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52633b04-de44-aafa-7647-2ecfac241eec" [ 892.996451] env[69992]: _type = "Task" [ 892.996451] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.012638] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52633b04-de44-aafa-7647-2ecfac241eec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.112289] env[69992]: DEBUG nova.scheduler.client.report [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 893.235051] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5265e6bd-199a-01a3-b071-50dcf275e1be, 'name': SearchDatastore_Task, 'duration_secs': 0.019622} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.235051] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 893.235324] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 9bab6bf7-43c8-4cc3-b484-4472f1acdf45/9bab6bf7-43c8-4cc3-b484-4472f1acdf45.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 893.235606] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ad7c3ac-9102-449b-8a5e-6ee3b3ab6ce0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.246974] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 893.246974] env[69992]: value = "task-2896710" [ 893.246974] env[69992]: _type = "Task" [ 893.246974] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.262740] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896710, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.283720] env[69992]: DEBUG nova.network.neutron [req-7bbc13f5-00a8-4145-b276-527bc2f3600d req-7d8b485b-5310-41cc-937f-94c213612a52 service nova] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Updated VIF entry in instance network info cache for port 73053414-72bf-473f-8a22-4e100e8ced17. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 893.284133] env[69992]: DEBUG nova.network.neutron [req-7bbc13f5-00a8-4145-b276-527bc2f3600d req-7d8b485b-5310-41cc-937f-94c213612a52 service nova] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Updating instance_info_cache with network_info: [{"id": "73053414-72bf-473f-8a22-4e100e8ced17", "address": "fa:16:3e:20:df:42", "network": {"id": "bea180e9-720e-4be5-bb1d-8aa1243cfe3f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-67313604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "658cab8ee4194f7f98dd07de450f248b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73053414-72", "ovs_interfaceid": "73053414-72bf-473f-8a22-4e100e8ced17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.285607] env[69992]: INFO nova.compute.manager [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Took 23.29 seconds to build instance. [ 893.343942] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 893.509317] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52633b04-de44-aafa-7647-2ecfac241eec, 'name': SearchDatastore_Task, 'duration_secs': 0.016395} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.509908] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 893.509908] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 893.510251] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.510251] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 893.510356] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 893.510606] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7dee9e9-3248-4817-856d-46940e64a597 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.522268] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 893.523265] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 893.523932] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1432d55a-c432-43de-aaf5-eecb1d6a978e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.536219] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 893.536219] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52dc1e94-9059-a507-50bc-233755808a6b" [ 893.536219] env[69992]: _type = "Task" [ 893.536219] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.548186] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52dc1e94-9059-a507-50bc-233755808a6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.624904] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.811s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.624904] env[69992]: DEBUG nova.compute.manager [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 893.626742] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 16.987s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.761913] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896710, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.788758] env[69992]: DEBUG oslo_concurrency.lockutils [req-7bbc13f5-00a8-4145-b276-527bc2f3600d req-7d8b485b-5310-41cc-937f-94c213612a52 service nova] Releasing lock "refresh_cache-9bab6bf7-43c8-4cc3-b484-4472f1acdf45" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 893.793449] env[69992]: DEBUG oslo_concurrency.lockutils [None req-32294086-9538-43cd-9785-a5d972b77694 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lock "27580836-7ab5-4e64-a985-3e6fc22a8b77" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.169s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.047225] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52dc1e94-9059-a507-50bc-233755808a6b, 'name': SearchDatastore_Task, 'duration_secs': 0.062518} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.051623] env[69992]: DEBUG nova.compute.manager [req-5d412ec0-5548-4e4a-a371-300e71ea237b req-a4c2d06f-e39a-47ac-953b-74330ccd9007 service nova] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Received event network-vif-plugged-617fc6d5-b33e-407b-8a59-8a6def94c1f4 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 894.051832] env[69992]: DEBUG oslo_concurrency.lockutils [req-5d412ec0-5548-4e4a-a371-300e71ea237b req-a4c2d06f-e39a-47ac-953b-74330ccd9007 service nova] Acquiring lock "27492ef7-8258-4001-b3b3-5bcb94e12c1f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 894.052045] env[69992]: DEBUG oslo_concurrency.lockutils [req-5d412ec0-5548-4e4a-a371-300e71ea237b req-a4c2d06f-e39a-47ac-953b-74330ccd9007 service nova] Lock "27492ef7-8258-4001-b3b3-5bcb94e12c1f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.052214] env[69992]: DEBUG oslo_concurrency.lockutils [req-5d412ec0-5548-4e4a-a371-300e71ea237b req-a4c2d06f-e39a-47ac-953b-74330ccd9007 service nova] Lock "27492ef7-8258-4001-b3b3-5bcb94e12c1f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.052375] env[69992]: DEBUG nova.compute.manager [req-5d412ec0-5548-4e4a-a371-300e71ea237b req-a4c2d06f-e39a-47ac-953b-74330ccd9007 service nova] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] No waiting events found dispatching network-vif-plugged-617fc6d5-b33e-407b-8a59-8a6def94c1f4 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 894.052659] env[69992]: WARNING nova.compute.manager [req-5d412ec0-5548-4e4a-a371-300e71ea237b req-a4c2d06f-e39a-47ac-953b-74330ccd9007 service nova] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Received unexpected event 
network-vif-plugged-617fc6d5-b33e-407b-8a59-8a6def94c1f4 for instance with vm_state building and task_state spawning. [ 894.052847] env[69992]: DEBUG nova.compute.manager [req-5d412ec0-5548-4e4a-a371-300e71ea237b req-a4c2d06f-e39a-47ac-953b-74330ccd9007 service nova] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Received event network-changed-617fc6d5-b33e-407b-8a59-8a6def94c1f4 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 894.053032] env[69992]: DEBUG nova.compute.manager [req-5d412ec0-5548-4e4a-a371-300e71ea237b req-a4c2d06f-e39a-47ac-953b-74330ccd9007 service nova] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Refreshing instance network info cache due to event network-changed-617fc6d5-b33e-407b-8a59-8a6def94c1f4. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 894.053203] env[69992]: DEBUG oslo_concurrency.lockutils [req-5d412ec0-5548-4e4a-a371-300e71ea237b req-a4c2d06f-e39a-47ac-953b-74330ccd9007 service nova] Acquiring lock "refresh_cache-27492ef7-8258-4001-b3b3-5bcb94e12c1f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.053337] env[69992]: DEBUG oslo_concurrency.lockutils [req-5d412ec0-5548-4e4a-a371-300e71ea237b req-a4c2d06f-e39a-47ac-953b-74330ccd9007 service nova] Acquired lock "refresh_cache-27492ef7-8258-4001-b3b3-5bcb94e12c1f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 894.053491] env[69992]: DEBUG nova.network.neutron [req-5d412ec0-5548-4e4a-a371-300e71ea237b req-a4c2d06f-e39a-47ac-953b-74330ccd9007 service nova] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Refreshing network info cache for port 617fc6d5-b33e-407b-8a59-8a6def94c1f4 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 894.054838] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2ecc0fa-a22e-4958-a104-0f0cdbdb26b6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.062496] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 894.062496] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5273d5db-5343-7423-c312-e69cd7218656" [ 894.062496] env[69992]: _type = "Task" [ 894.062496] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.073551] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5273d5db-5343-7423-c312-e69cd7218656, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.131213] env[69992]: DEBUG nova.compute.utils [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 894.135794] env[69992]: INFO nova.compute.claims [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 894.140144] env[69992]: DEBUG nova.compute.manager [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 894.140144] env[69992]: DEBUG nova.network.neutron [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 894.190084] env[69992]: DEBUG oslo_vmware.rw_handles [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5262c2ea-38fc-63c0-9377-9f4f21dc0363/disk-0.vmdk. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 894.190694] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e302f5e5-5859-4eae-9bf1-ee67cfa93080 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.199610] env[69992]: DEBUG oslo_vmware.rw_handles [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5262c2ea-38fc-63c0-9377-9f4f21dc0363/disk-0.vmdk is in state: ready. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 894.199824] env[69992]: ERROR oslo_vmware.rw_handles [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5262c2ea-38fc-63c0-9377-9f4f21dc0363/disk-0.vmdk due to incomplete transfer. [ 894.200080] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-76071ba4-0ad9-43e8-99df-9011da6dcad2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.217685] env[69992]: DEBUG oslo_vmware.rw_handles [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5262c2ea-38fc-63c0-9377-9f4f21dc0363/disk-0.vmdk. 
{{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 894.217914] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Uploaded image 572b940e-a598-4231-8a23-19c3b913cf9c to the Glance image server {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 894.220011] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Destroying the VM {{(pid=69992) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 894.220656] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-fe041258-9c2f-4dab-ac87-90a4b4ca4acf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.236472] env[69992]: DEBUG oslo_vmware.api [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Waiting for the task: (returnval){ [ 894.236472] env[69992]: value = "task-2896711" [ 894.236472] env[69992]: _type = "Task" [ 894.236472] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.250067] env[69992]: DEBUG oslo_vmware.api [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896711, 'name': Destroy_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.261603] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896710, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.622083} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.263138] env[69992]: DEBUG nova.policy [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '730c31e7d7524a5d9d7d7e905871a5e7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '216d97a77775447d967e818418defd3c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 894.267031] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 9bab6bf7-43c8-4cc3-b484-4472f1acdf45/9bab6bf7-43c8-4cc3-b484-4472f1acdf45.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 894.267298] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 894.267583] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2a710476-4012-4719-bb7e-5c2072c2baba {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.276261] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 894.276261] env[69992]: value = "task-2896712" [ 894.276261] env[69992]: _type = "Task" [ 894.276261] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.285196] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896712, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.297374] env[69992]: DEBUG nova.compute.manager [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 894.581385] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5273d5db-5343-7423-c312-e69cd7218656, 'name': SearchDatastore_Task, 'duration_secs': 0.0372} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.581791] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 894.581960] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 27492ef7-8258-4001-b3b3-5bcb94e12c1f/27492ef7-8258-4001-b3b3-5bcb94e12c1f.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 894.582225] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c06a2bd6-b902-4dc4-9769-355861493df4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.594908] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 894.594908] env[69992]: value = "task-2896713" [ 894.594908] env[69992]: _type = "Task" [ 894.594908] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.607235] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896713, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.640707] env[69992]: DEBUG nova.compute.manager [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 894.646959] env[69992]: INFO nova.compute.resource_tracker [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Updating resource usage from migration 085fa7b6-8373-47ff-9061-cee118d126e6 [ 894.747416] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "eec50935-f553-43c7-b67b-7289299745bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 894.747689] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "eec50935-f553-43c7-b67b-7289299745bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.758401] env[69992]: DEBUG oslo_vmware.api [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896711, 'name': Destroy_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.794834] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896712, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.317449} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.795133] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 894.797193] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88b24e31-2cfb-42e2-9dd2-63e9d20f5c72 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.835393] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 9bab6bf7-43c8-4cc3-b484-4472f1acdf45/9bab6bf7-43c8-4cc3-b484-4472f1acdf45.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 894.839045] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a35c285-ad8c-4238-b18c-4426f1d4de2c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.865548] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 894.865548] env[69992]: value = "task-2896714" [ 894.865548] env[69992]: _type = "Task" [ 894.865548] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.876947] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896714, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.877601] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 895.109177] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896713, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.259860] env[69992]: DEBUG oslo_vmware.api [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896711, 'name': Destroy_Task, 'duration_secs': 0.984383} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.261515] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Destroyed the VM [ 895.261515] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Deleting Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 895.261515] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e7d38465-997c-4188-974d-ce5cc7c7ce42 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.272236] env[69992]: DEBUG oslo_vmware.api [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Waiting for the task: (returnval){ [ 895.272236] env[69992]: value = "task-2896715" [ 895.272236] env[69992]: _type = "Task" [ 895.272236] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.285725] env[69992]: DEBUG oslo_vmware.api [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896715, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.288083] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8506600e-e48f-49c9-a4b8-9254d3165a26 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.297615] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0570b2d6-be62-4551-a702-51f7bd0f8ecc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.336029] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2a87e0-ef00-4f2c-ae95-1e2a186979fa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.345932] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148afdcc-0bb2-48c4-8330-fea571842083 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.363832] env[69992]: DEBUG nova.compute.provider_tree [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 895.366645] env[69992]: DEBUG nova.network.neutron [req-5d412ec0-5548-4e4a-a371-300e71ea237b req-a4c2d06f-e39a-47ac-953b-74330ccd9007 service nova] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Updated VIF entry in instance 
network info cache for port 617fc6d5-b33e-407b-8a59-8a6def94c1f4. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 895.367098] env[69992]: DEBUG nova.network.neutron [req-5d412ec0-5548-4e4a-a371-300e71ea237b req-a4c2d06f-e39a-47ac-953b-74330ccd9007 service nova] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Updating instance_info_cache with network_info: [{"id": "617fc6d5-b33e-407b-8a59-8a6def94c1f4", "address": "fa:16:3e:8b:85:5e", "network": {"id": "918ab136-b380-4ccd-b218-738aac4652fa", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1944921913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02824f4021a5400583cf13cd553207fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac2c9d07-ed01-47a9-88f1-562992bc1076", "external-id": "nsx-vlan-transportzone-968", "segmentation_id": 968, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap617fc6d5-b3", "ovs_interfaceid": "617fc6d5-b33e-407b-8a59-8a6def94c1f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.381177] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896714, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.455881] env[69992]: DEBUG nova.network.neutron [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Successfully created port: aeab7334-78de-4ade-9c52-d77911f831fb {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 895.463603] env[69992]: INFO nova.compute.manager [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Rescuing [ 895.463900] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquiring lock "refresh_cache-27580836-7ab5-4e64-a985-3e6fc22a8b77" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.464093] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquired lock "refresh_cache-27580836-7ab5-4e64-a985-3e6fc22a8b77" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.464274] env[69992]: DEBUG nova.network.neutron [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 895.609570] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896713, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.763771} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.609895] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 27492ef7-8258-4001-b3b3-5bcb94e12c1f/27492ef7-8258-4001-b3b3-5bcb94e12c1f.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 895.610329] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 895.610447] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b14cf0f5-eaec-42c0-a2d5-bf938c04b8f2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.620117] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 895.620117] env[69992]: value = "task-2896716" [ 895.620117] env[69992]: _type = "Task" [ 895.620117] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.629828] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896716, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.660327] env[69992]: DEBUG nova.compute.manager [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 895.697971] env[69992]: DEBUG nova.virt.hardware [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 895.698448] env[69992]: DEBUG nova.virt.hardware [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 895.698637] env[69992]: DEBUG nova.virt.hardware [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 895.699663] env[69992]: DEBUG nova.virt.hardware [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 895.699663] env[69992]: DEBUG nova.virt.hardware [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 895.699874] env[69992]: DEBUG nova.virt.hardware [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 895.700162] env[69992]: DEBUG nova.virt.hardware [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 895.700340] env[69992]: DEBUG nova.virt.hardware [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 895.700529] env[69992]: DEBUG 
nova.virt.hardware [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 895.700744] env[69992]: DEBUG nova.virt.hardware [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 895.700974] env[69992]: DEBUG nova.virt.hardware [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 895.701980] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ccb751e-b622-456c-bf05-6acb96de10c5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.712955] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3b195f9-fa77-4556-bade-851c7e24960a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.783961] env[69992]: DEBUG oslo_vmware.api [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896715, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.831094] env[69992]: DEBUG nova.network.neutron [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Successfully updated port: 11df5954-2f09-4c8f-bab8-a5b6740bd994 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 895.876488] env[69992]: DEBUG oslo_concurrency.lockutils [req-5d412ec0-5548-4e4a-a371-300e71ea237b req-a4c2d06f-e39a-47ac-953b-74330ccd9007 service nova] Releasing lock "refresh_cache-27492ef7-8258-4001-b3b3-5bcb94e12c1f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 895.877601] env[69992]: DEBUG nova.scheduler.client.report [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 895.893156] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896714, 'name': ReconfigVM_Task, 'duration_secs': 0.56667} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.893282] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 9bab6bf7-43c8-4cc3-b484-4472f1acdf45/9bab6bf7-43c8-4cc3-b484-4472f1acdf45.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 895.895413] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4ca3587c-ec18-4e05-9052-1ea6df029202 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.905664] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 895.905664] env[69992]: value = "task-2896717" [ 895.905664] env[69992]: _type = "Task" [ 895.905664] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.927112] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896717, 'name': Rename_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.133408] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896716, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.147723} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.133689] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 896.134916] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e7d7fcd-fe4e-4c29-9151-526c384b3d55 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.164020] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 27492ef7-8258-4001-b3b3-5bcb94e12c1f/27492ef7-8258-4001-b3b3-5bcb94e12c1f.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 896.167498] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35de015b-04c6-4e7d-9538-941a5fadf996 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.197435] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 896.197435] env[69992]: value = "task-2896718" [ 896.197435] env[69992]: _type = "Task" [ 896.197435] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.208749] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896718, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.288071] env[69992]: DEBUG oslo_vmware.api [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896715, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.334985] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "refresh_cache-a49b4721-e338-4e60-b91e-137caa3c9c03" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.335170] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquired lock "refresh_cache-a49b4721-e338-4e60-b91e-137caa3c9c03" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 896.335328] env[69992]: DEBUG nova.network.neutron [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 896.394819] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.768s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.395767] env[69992]: INFO nova.compute.manager [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Migrating [ 896.395767] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.395767] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquired lock "compute-rpcapi-router" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 896.397933] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.156s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.398311] env[69992]: DEBUG nova.objects.instance [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Lazy-loading 'resources' on Instance uuid e934fc79-f7c5-4ca9-9f81-85467c1e9b45 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 896.422906] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896717, 
'name': Rename_Task, 'duration_secs': 0.260983} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.426017] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 896.426017] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8420ffbb-0d4b-4dd6-96a1-3d27e9caa305 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.436466] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 896.436466] env[69992]: value = "task-2896719" [ 896.436466] env[69992]: _type = "Task" [ 896.436466] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.450035] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896719, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.458940] env[69992]: DEBUG nova.network.neutron [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Updating instance_info_cache with network_info: [{"id": "b35f3c6b-88f2-436b-994c-ba4ce4ad2662", "address": "fa:16:3e:c0:ec:2a", "network": {"id": "6427034b-cb68-41ff-8426-d7ce876af837", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-938441390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3fc8205ec2e14fdba28998521b552a69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb35f3c6b-88", "ovs_interfaceid": "b35f3c6b-88f2-436b-994c-ba4ce4ad2662", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.468511] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.468669] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.593356] env[69992]: DEBUG nova.compute.manager [req-af32682d-a23c-4a82-af19-c295b64a727a req-2b7d575e-7257-408f-8820-b4948783aa2f service nova] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Received event network-vif-plugged-11df5954-2f09-4c8f-bab8-a5b6740bd994 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 896.593580] env[69992]: DEBUG oslo_concurrency.lockutils [req-af32682d-a23c-4a82-af19-c295b64a727a req-2b7d575e-7257-408f-8820-b4948783aa2f service nova] Acquiring lock "a49b4721-e338-4e60-b91e-137caa3c9c03-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.593791] env[69992]: DEBUG oslo_concurrency.lockutils [req-af32682d-a23c-4a82-af19-c295b64a727a req-2b7d575e-7257-408f-8820-b4948783aa2f service nova] Lock "a49b4721-e338-4e60-b91e-137caa3c9c03-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.593952] env[69992]: DEBUG oslo_concurrency.lockutils [req-af32682d-a23c-4a82-af19-c295b64a727a req-2b7d575e-7257-408f-8820-b4948783aa2f service nova] Lock "a49b4721-e338-4e60-b91e-137caa3c9c03-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.594608] env[69992]: DEBUG nova.compute.manager [req-af32682d-a23c-4a82-af19-c295b64a727a req-2b7d575e-7257-408f-8820-b4948783aa2f service nova] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] No waiting events found dispatching network-vif-plugged-11df5954-2f09-4c8f-bab8-a5b6740bd994 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 896.594812] env[69992]: WARNING nova.compute.manager [req-af32682d-a23c-4a82-af19-c295b64a727a req-2b7d575e-7257-408f-8820-b4948783aa2f service nova] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Received unexpected event network-vif-plugged-11df5954-2f09-4c8f-bab8-a5b6740bd994 for instance with vm_state building and task_state spawning. [ 896.710022] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896718, 'name': ReconfigVM_Task, 'duration_secs': 0.360977} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.711250] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 27492ef7-8258-4001-b3b3-5bcb94e12c1f/27492ef7-8258-4001-b3b3-5bcb94e12c1f.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 896.712161] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-68c15764-a817-4bbf-b5db-d25758ee1d25 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.720462] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 896.720462] env[69992]: value = "task-2896720" [ 896.720462] env[69992]: _type = "Task" [ 896.720462] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.737703] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896720, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.783914] env[69992]: DEBUG oslo_vmware.api [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896715, 'name': RemoveSnapshot_Task, 'duration_secs': 1.448278} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.784774] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Deleted Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 896.786250] env[69992]: INFO nova.compute.manager [None req-2be2901f-adb8-4a6b-8918-9a7404317732 tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Took 16.14 seconds to snapshot the instance on the hypervisor. [ 896.891354] env[69992]: DEBUG nova.network.neutron [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 896.901027] env[69992]: INFO nova.compute.rpcapi [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 896.901027] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Releasing lock "compute-rpcapi-router" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.959740] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896719, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.961893] env[69992]: DEBUG nova.compute.manager [req-1941d86e-40d5-478f-824f-917f259ec3be req-a7fca7df-3993-4ebe-b0d6-06f14d0a31df service nova] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Received event network-changed-8b0b0a96-e1ab-4c92-b8d0-af130d30c696 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 896.963046] env[69992]: DEBUG nova.compute.manager [req-1941d86e-40d5-478f-824f-917f259ec3be req-a7fca7df-3993-4ebe-b0d6-06f14d0a31df service nova] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Refreshing instance network info cache due to event network-changed-8b0b0a96-e1ab-4c92-b8d0-af130d30c696. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 896.963046] env[69992]: DEBUG oslo_concurrency.lockutils [req-1941d86e-40d5-478f-824f-917f259ec3be req-a7fca7df-3993-4ebe-b0d6-06f14d0a31df service nova] Acquiring lock "refresh_cache-a9274dfc-afbd-419b-a98b-053d71a05d7c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.963046] env[69992]: DEBUG oslo_concurrency.lockutils [req-1941d86e-40d5-478f-824f-917f259ec3be req-a7fca7df-3993-4ebe-b0d6-06f14d0a31df service nova] Acquired lock "refresh_cache-a9274dfc-afbd-419b-a98b-053d71a05d7c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 896.963046] env[69992]: DEBUG nova.network.neutron [req-1941d86e-40d5-478f-824f-917f259ec3be req-a7fca7df-3993-4ebe-b0d6-06f14d0a31df service nova] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Refreshing network info cache for port 8b0b0a96-e1ab-4c92-b8d0-af130d30c696 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 896.965164] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Releasing lock "refresh_cache-27580836-7ab5-4e64-a985-3e6fc22a8b77" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 897.209967] env[69992]: DEBUG nova.network.neutron [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Updating instance_info_cache with network_info: [{"id": "11df5954-2f09-4c8f-bab8-a5b6740bd994", "address": "fa:16:3e:a3:be:eb", 
"network": {"id": "918ab136-b380-4ccd-b218-738aac4652fa", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1944921913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02824f4021a5400583cf13cd553207fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac2c9d07-ed01-47a9-88f1-562992bc1076", "external-id": "nsx-vlan-transportzone-968", "segmentation_id": 968, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11df5954-2f", "ovs_interfaceid": "11df5954-2f09-4c8f-bab8-a5b6740bd994", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.238773] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896720, 'name': Rename_Task, 'duration_secs': 0.198695} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.238773] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 897.239013] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d9821525-51a2-48b7-83e8-1484c63c7460 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.248039] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 897.248039] env[69992]: value = "task-2896721" [ 897.248039] env[69992]: _type = "Task" [ 897.248039] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.258688] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896721, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.425333] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "refresh_cache-1d436762-964d-40d9-871e-ee33c3ba25b5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.425549] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquired lock "refresh_cache-1d436762-964d-40d9-871e-ee33c3ba25b5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 897.425869] env[69992]: DEBUG nova.network.neutron [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 897.427932] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Acquiring lock "c1d73002-6e69-41a6-95b3-34dccaf872ef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.428205] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Lock "c1d73002-6e69-41a6-95b3-34dccaf872ef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.428437] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Acquiring lock "c1d73002-6e69-41a6-95b3-34dccaf872ef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.428762] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Lock "c1d73002-6e69-41a6-95b3-34dccaf872ef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.428877] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Lock "c1d73002-6e69-41a6-95b3-34dccaf872ef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.432892] env[69992]: INFO nova.compute.manager [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Terminating instance [ 897.445810] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815bd6ea-3ac1-4172-968e-f234252760b2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.460058] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-252a1154-2535-466b-8fc9-08db2ace1f19 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.465631] env[69992]: DEBUG oslo_vmware.api [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896719, 'name': PowerOnVM_Task, 'duration_secs': 0.625587} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.469229] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 897.469229] env[69992]: INFO nova.compute.manager [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Took 12.08 seconds to spawn the instance on the hypervisor. 
[ 897.470049] env[69992]: DEBUG nova.compute.manager [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 897.473888] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38fb16d0-d6cc-47e9-8f96-c6f9ddd1e965 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.507795] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e04e39-5eea-4493-9dca-c506791a577d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.516434] env[69992]: DEBUG nova.network.neutron [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Successfully updated port: aeab7334-78de-4ade-9c52-d77911f831fb {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 897.524248] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d4cb40-bcd7-4212-88fd-ff11c2f8db74 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.541450] env[69992]: DEBUG nova.compute.provider_tree [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 897.712391] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Releasing lock "refresh_cache-a49b4721-e338-4e60-b91e-137caa3c9c03" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 897.712710] env[69992]: DEBUG nova.compute.manager [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Instance network_info: |[{"id": "11df5954-2f09-4c8f-bab8-a5b6740bd994", "address": "fa:16:3e:a3:be:eb", "network": {"id": "918ab136-b380-4ccd-b218-738aac4652fa", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1944921913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02824f4021a5400583cf13cd553207fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac2c9d07-ed01-47a9-88f1-562992bc1076", "external-id": "nsx-vlan-transportzone-968", "segmentation_id": 968, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11df5954-2f", "ovs_interfaceid": 
"11df5954-2f09-4c8f-bab8-a5b6740bd994", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 897.713211] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:be:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac2c9d07-ed01-47a9-88f1-562992bc1076', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '11df5954-2f09-4c8f-bab8-a5b6740bd994', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 897.721192] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 897.721429] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 897.721651] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-762f7cec-7503-4933-961b-d5b8c7759b42 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.745092] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 897.745092] env[69992]: value = "task-2896722" [ 897.745092] env[69992]: _type = "Task" [ 897.745092] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.755939] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896722, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.762916] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896721, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.939070] env[69992]: DEBUG nova.compute.manager [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 897.939210] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 897.941034] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d64140-5e7c-491d-827a-242a112784d4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.950787] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 897.951310] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5526dbaa-5e76-4f9a-849c-a5d97e4ff35b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.963021] env[69992]: DEBUG oslo_vmware.api [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Waiting for the task: (returnval){ [ 897.963021] env[69992]: value = "task-2896723" [ 897.963021] env[69992]: _type = "Task" [ 897.963021] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.983636] env[69992]: DEBUG oslo_vmware.api [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896723, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.993783] env[69992]: DEBUG nova.network.neutron [req-1941d86e-40d5-478f-824f-917f259ec3be req-a7fca7df-3993-4ebe-b0d6-06f14d0a31df service nova] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Updated VIF entry in instance network info cache for port 8b0b0a96-e1ab-4c92-b8d0-af130d30c696. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 897.993783] env[69992]: DEBUG nova.network.neutron [req-1941d86e-40d5-478f-824f-917f259ec3be req-a7fca7df-3993-4ebe-b0d6-06f14d0a31df service nova] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Updating instance_info_cache with network_info: [{"id": "8b0b0a96-e1ab-4c92-b8d0-af130d30c696", "address": "fa:16:3e:d2:ca:cd", "network": {"id": "b6ad7b53-72db-475c-a28d-2b0c8da19818", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1429555593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea0f9171bc5c4034b8dbe9100bd6e007", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b0b0a96-e1", "ovs_interfaceid": "8b0b0a96-e1ab-4c92-b8d0-af130d30c696", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.025417] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquiring lock "refresh_cache-73e41918-88b8-4ff7-9fdd-b45ac97c80ec" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.025705] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquired lock "refresh_cache-73e41918-88b8-4ff7-9fdd-b45ac97c80ec" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 898.025835] env[69992]: DEBUG nova.network.neutron [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 898.029477] env[69992]: INFO nova.compute.manager [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Took 26.92 seconds to build instance. 
[ 898.046807] env[69992]: DEBUG nova.scheduler.client.report [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 898.111981] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "a8813822-f77b-4b73-a6dc-e0eab83b0402" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.111981] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "a8813822-f77b-4b73-a6dc-e0eab83b0402" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.259635] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Acquiring lock "e0b5ad16-f631-444c-a189-167e34574316" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.259945] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Lock "e0b5ad16-f631-444c-a189-167e34574316" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.275020] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896722, 'name': CreateVM_Task, 'duration_secs': 0.454748} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.275020] env[69992]: DEBUG oslo_vmware.api [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896721, 'name': PowerOnVM_Task, 'duration_secs': 0.780737} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.275213] env[69992]: DEBUG nova.network.neutron [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Updating instance_info_cache with network_info: [{"id": "2584dc71-913f-4c9b-922c-f8b28530b82f", "address": "fa:16:3e:42:68:21", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.191", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2584dc71-91", "ovs_interfaceid": "2584dc71-913f-4c9b-922c-f8b28530b82f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.276140] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 898.276545] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 898.276776] env[69992]: INFO nova.compute.manager [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Took 10.25 seconds to spawn the instance on the hypervisor. 
[ 898.277124] env[69992]: DEBUG nova.compute.manager [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 898.278143] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.278293] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 898.278861] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 898.279650] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8719453c-2df6-40da-8bc7-bdef4002b1cb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.282561] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c3310d7-b3a7-4dbe-a642-89133219ab21 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.298460] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 898.298460] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]521b76e4-3e24-b167-22d2-478bd5ccab49" [ 898.298460] env[69992]: _type = "Task" [ 898.298460] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.316039] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521b76e4-3e24-b167-22d2-478bd5ccab49, 'name': SearchDatastore_Task, 'duration_secs': 0.012972} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.316039] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 898.316134] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 898.316302] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.316441] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 898.316610] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 898.316909] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-acc2ebe1-fbe3-4657-a0ef-edbfcf8275ad {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.327063] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 898.327063] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 898.327421] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97f006ab-a465-4d0a-a784-ff9ec53312d1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.335856] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 898.335856] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52ba41f4-68e8-049f-dc78-286b02de0fc9" [ 898.335856] env[69992]: _type = "Task" [ 898.335856] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.349572] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ba41f4-68e8-049f-dc78-286b02de0fc9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.471011] env[69992]: DEBUG oslo_vmware.api [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896723, 'name': PowerOffVM_Task, 'duration_secs': 0.212575} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.471407] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 898.471587] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 898.472543] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-05a96470-5dcf-4b43-9fd2-70d13192dbed {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.499976] env[69992]: DEBUG oslo_concurrency.lockutils [req-1941d86e-40d5-478f-824f-917f259ec3be req-a7fca7df-3993-4ebe-b0d6-06f14d0a31df service nova] Releasing lock "refresh_cache-a9274dfc-afbd-419b-a98b-053d71a05d7c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 898.542449] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b21f7074-946b-4903-9972-8383acb5f034 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "9bab6bf7-43c8-4cc3-b484-4472f1acdf45" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.927s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.544411] 
env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 898.545190] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-def6603f-85e2-4f51-b8f1-b01421419304 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.552588] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.154s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.560022] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.178s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.560022] env[69992]: INFO nova.compute.claims [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 898.562969] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 898.563211] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 898.563375] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Deleting the datastore file [datastore2] c1d73002-6e69-41a6-95b3-34dccaf872ef {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 898.563788] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 898.563788] env[69992]: value = "task-2896726" [ 898.563788] env[69992]: _type = "Task" [ 898.563788] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.565022] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df0357e0-b351-4935-8b7c-157da892d254 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.568608] env[69992]: DEBUG nova.network.neutron [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 898.583875] env[69992]: INFO nova.scheduler.client.report [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Deleted allocations for instance e934fc79-f7c5-4ca9-9f81-85467c1e9b45 [ 898.586421] env[69992]: DEBUG oslo_vmware.api [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Waiting for the task: (returnval){ [ 898.586421] env[69992]: value = "task-2896728" [ 898.586421] env[69992]: _type = "Task" [ 898.586421] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.592338] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896726, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.602617] env[69992]: DEBUG oslo_vmware.api [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896728, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.749362] env[69992]: DEBUG nova.network.neutron [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Updating instance_info_cache with network_info: [{"id": "aeab7334-78de-4ade-9c52-d77911f831fb", "address": "fa:16:3e:85:cf:90", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.108", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaeab7334-78", "ovs_interfaceid": "aeab7334-78de-4ade-9c52-d77911f831fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.778546] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Releasing lock "refresh_cache-1d436762-964d-40d9-871e-ee33c3ba25b5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 898.808358] env[69992]: INFO nova.compute.manager [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Took 26.62 seconds to build instance. [ 898.847110] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ba41f4-68e8-049f-dc78-286b02de0fc9, 'name': SearchDatastore_Task, 'duration_secs': 0.013219} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.847889] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bf1c1b0-eeea-428e-8dc7-837ccf396233 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.854684] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 898.854684] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52775663-ce8b-9c1b-a5f1-1baa3d404dcc" [ 898.854684] env[69992]: _type = "Task" [ 898.854684] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.863822] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52775663-ce8b-9c1b-a5f1-1baa3d404dcc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.976272] env[69992]: DEBUG nova.compute.manager [req-a346af1b-9971-4da3-83a3-c56a58e2f184 req-f34b86a7-fa8c-43d3-8c70-e976ce9eda60 service nova] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Received event network-changed-11df5954-2f09-4c8f-bab8-a5b6740bd994 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 898.976971] env[69992]: DEBUG nova.compute.manager [req-a346af1b-9971-4da3-83a3-c56a58e2f184 req-f34b86a7-fa8c-43d3-8c70-e976ce9eda60 service nova] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Refreshing instance network info cache due to event network-changed-11df5954-2f09-4c8f-bab8-a5b6740bd994. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 898.976971] env[69992]: DEBUG oslo_concurrency.lockutils [req-a346af1b-9971-4da3-83a3-c56a58e2f184 req-f34b86a7-fa8c-43d3-8c70-e976ce9eda60 service nova] Acquiring lock "refresh_cache-a49b4721-e338-4e60-b91e-137caa3c9c03" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.977436] env[69992]: DEBUG oslo_concurrency.lockutils [req-a346af1b-9971-4da3-83a3-c56a58e2f184 req-f34b86a7-fa8c-43d3-8c70-e976ce9eda60 service nova] Acquired lock "refresh_cache-a49b4721-e338-4e60-b91e-137caa3c9c03" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 898.977664] env[69992]: DEBUG nova.network.neutron [req-a346af1b-9971-4da3-83a3-c56a58e2f184 req-f34b86a7-fa8c-43d3-8c70-e976ce9eda60 service nova] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Refreshing network info cache for port 11df5954-2f09-4c8f-bab8-a5b6740bd994 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 899.046560] env[69992]: DEBUG nova.compute.manager [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 899.079567] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896726, 'name': PowerOffVM_Task, 'duration_secs': 0.22088} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.079846] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 899.081506] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a67cced-19fc-41f3-a5aa-7313867ab556 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.110495] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5f929820-2047-455b-a6d2-63dc5dcc8532 tempest-ServerDiagnosticsTest-476413324 tempest-ServerDiagnosticsTest-476413324-project-member] Lock "e934fc79-f7c5-4ca9-9f81-85467c1e9b45" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.764s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.115597] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-444eb1bd-e215-435e-a0db-13599bdeac10 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.126269] env[69992]: DEBUG oslo_vmware.api [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Task: {'id': task-2896728, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176864} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.128679] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 899.129056] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 899.129150] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 899.129348] env[69992]: INFO nova.compute.manager [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Took 1.19 seconds to destroy the instance on the hypervisor. 
[ 899.129529] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 899.131896] env[69992]: DEBUG nova.compute.manager [-] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 899.132010] env[69992]: DEBUG nova.network.neutron [-] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 899.164928] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 899.165546] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c44a2bc-ab69-430d-b8aa-9c7ac9283144 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.178021] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 899.178021] env[69992]: value = "task-2896731" [ 899.178021] env[69992]: _type = "Task" [ 899.178021] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.183657] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896731, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.254716] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Releasing lock "refresh_cache-73e41918-88b8-4ff7-9fdd-b45ac97c80ec" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.254811] env[69992]: DEBUG nova.compute.manager [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Instance network_info: |[{"id": "aeab7334-78de-4ade-9c52-d77911f831fb", "address": "fa:16:3e:85:cf:90", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.108", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaeab7334-78", "ovs_interfaceid": "aeab7334-78de-4ade-9c52-d77911f831fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 899.255239] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:cf:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '07e9bef1-2b0e-4e4d-997f-de71bb0e213a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aeab7334-78de-4ade-9c52-d77911f831fb', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 899.263117] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Creating folder: Project (216d97a77775447d967e818418defd3c). Parent ref: group-v581821. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 899.265022] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-57a9543c-3b92-4663-9d0b-debbf712e339 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.276896] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Created folder: Project (216d97a77775447d967e818418defd3c) in parent group-v581821. [ 899.277129] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Creating folder: Instances. Parent ref: group-v581882. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 899.277373] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2f3bb8b8-1dca-4394-b803-0bbcd74125a8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.287685] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Created folder: Instances in parent group-v581882. [ 899.288277] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 899.291015] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 899.291015] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d48ca4f3-5db4-434f-ab81-21f090f4df5c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.310472] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b03e796-83c3-4094-b1fe-51660eef81d9 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "27492ef7-8258-4001-b3b3-5bcb94e12c1f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.117s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.316346] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 899.316346] env[69992]: value = "task-2896734" [ 899.316346] env[69992]: _type = "Task" [ 899.316346] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.329670] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896734, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.367221] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52775663-ce8b-9c1b-a5f1-1baa3d404dcc, 'name': SearchDatastore_Task, 'duration_secs': 0.019424} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.367659] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.368080] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] a49b4721-e338-4e60-b91e-137caa3c9c03/a49b4721-e338-4e60-b91e-137caa3c9c03.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 899.368685] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-adcf6dea-4302-4d18-8987-4232a8d47fa3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.379045] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 899.379045] env[69992]: value = "task-2896735" [ 899.379045] env[69992]: _type = "Task" [ 899.379045] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.393642] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896735, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.421593] env[69992]: DEBUG oslo_concurrency.lockutils [None req-48b1c29a-564f-428f-9d13-a7cdadc14714 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "9bab6bf7-43c8-4cc3-b484-4472f1acdf45" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.421844] env[69992]: DEBUG oslo_concurrency.lockutils [None req-48b1c29a-564f-428f-9d13-a7cdadc14714 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "9bab6bf7-43c8-4cc3-b484-4472f1acdf45" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.422062] env[69992]: DEBUG nova.compute.manager [None req-48b1c29a-564f-428f-9d13-a7cdadc14714 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 899.423444] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3910b3f4-6c3e-471a-b9a4-6e2c97e8faae {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.431634] env[69992]: DEBUG nova.compute.manager [None req-48b1c29a-564f-428f-9d13-a7cdadc14714 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69992) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 899.432360] env[69992]: DEBUG nova.objects.instance [None req-48b1c29a-564f-428f-9d13-a7cdadc14714 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lazy-loading 'flavor' on Instance uuid 9bab6bf7-43c8-4cc3-b484-4472f1acdf45 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 899.494482] env[69992]: DEBUG nova.compute.manager [req-af21c2d5-67f1-4238-982b-b42352cf5a16 req-60c809ec-1742-44aa-8e4f-f8eda1e6e153 service nova] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Received event network-vif-plugged-aeab7334-78de-4ade-9c52-d77911f831fb {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 899.494764] env[69992]: DEBUG oslo_concurrency.lockutils [req-af21c2d5-67f1-4238-982b-b42352cf5a16 req-60c809ec-1742-44aa-8e4f-f8eda1e6e153 service nova] Acquiring lock "73e41918-88b8-4ff7-9fdd-b45ac97c80ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.495216] env[69992]: DEBUG oslo_concurrency.lockutils [req-af21c2d5-67f1-4238-982b-b42352cf5a16 req-60c809ec-1742-44aa-8e4f-f8eda1e6e153 service nova] Lock "73e41918-88b8-4ff7-9fdd-b45ac97c80ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.495452] env[69992]: DEBUG 
oslo_concurrency.lockutils [req-af21c2d5-67f1-4238-982b-b42352cf5a16 req-60c809ec-1742-44aa-8e4f-f8eda1e6e153 service nova] Lock "73e41918-88b8-4ff7-9fdd-b45ac97c80ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.495650] env[69992]: DEBUG nova.compute.manager [req-af21c2d5-67f1-4238-982b-b42352cf5a16 req-60c809ec-1742-44aa-8e4f-f8eda1e6e153 service nova] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] No waiting events found dispatching network-vif-plugged-aeab7334-78de-4ade-9c52-d77911f831fb {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 899.495868] env[69992]: WARNING nova.compute.manager [req-af21c2d5-67f1-4238-982b-b42352cf5a16 req-60c809ec-1742-44aa-8e4f-f8eda1e6e153 service nova] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Received unexpected event network-vif-plugged-aeab7334-78de-4ade-9c52-d77911f831fb for instance with vm_state building and task_state spawning. [ 899.496058] env[69992]: DEBUG nova.compute.manager [req-af21c2d5-67f1-4238-982b-b42352cf5a16 req-60c809ec-1742-44aa-8e4f-f8eda1e6e153 service nova] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Received event network-changed-aeab7334-78de-4ade-9c52-d77911f831fb {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 899.496239] env[69992]: DEBUG nova.compute.manager [req-af21c2d5-67f1-4238-982b-b42352cf5a16 req-60c809ec-1742-44aa-8e4f-f8eda1e6e153 service nova] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Refreshing instance network info cache due to event network-changed-aeab7334-78de-4ade-9c52-d77911f831fb. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 899.496442] env[69992]: DEBUG oslo_concurrency.lockutils [req-af21c2d5-67f1-4238-982b-b42352cf5a16 req-60c809ec-1742-44aa-8e4f-f8eda1e6e153 service nova] Acquiring lock "refresh_cache-73e41918-88b8-4ff7-9fdd-b45ac97c80ec" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.496594] env[69992]: DEBUG oslo_concurrency.lockutils [req-af21c2d5-67f1-4238-982b-b42352cf5a16 req-60c809ec-1742-44aa-8e4f-f8eda1e6e153 service nova] Acquired lock "refresh_cache-73e41918-88b8-4ff7-9fdd-b45ac97c80ec" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.497054] env[69992]: DEBUG nova.network.neutron [req-af21c2d5-67f1-4238-982b-b42352cf5a16 req-60c809ec-1742-44aa-8e4f-f8eda1e6e153 service nova] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Refreshing network info cache for port aeab7334-78de-4ade-9c52-d77911f831fb {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 899.577158] env[69992]: DEBUG oslo_concurrency.lockutils [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.697888] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] VM already powered off {{(pid=69992) 
power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 899.698156] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 899.698415] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.698572] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.699047] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 899.701762] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7cf329d-1ebe-44d8-9847-e7d9dd10ad29 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.721033] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 899.721033] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 899.721843] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09c88075-3f69-423f-be4f-47d1982cb67b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.730028] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 899.730028] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5281001e-13f1-6ad1-2d26-bf732dc13de4" [ 899.730028] env[69992]: _type = "Task" [ 899.730028] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.744568] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5281001e-13f1-6ad1-2d26-bf732dc13de4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.750457] env[69992]: DEBUG nova.network.neutron [req-a346af1b-9971-4da3-83a3-c56a58e2f184 req-f34b86a7-fa8c-43d3-8c70-e976ce9eda60 service nova] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Updated VIF entry in instance network info cache for port 11df5954-2f09-4c8f-bab8-a5b6740bd994. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 899.750824] env[69992]: DEBUG nova.network.neutron [req-a346af1b-9971-4da3-83a3-c56a58e2f184 req-f34b86a7-fa8c-43d3-8c70-e976ce9eda60 service nova] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Updating instance_info_cache with network_info: [{"id": "11df5954-2f09-4c8f-bab8-a5b6740bd994", "address": "fa:16:3e:a3:be:eb", "network": {"id": "918ab136-b380-4ccd-b218-738aac4652fa", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1944921913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02824f4021a5400583cf13cd553207fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac2c9d07-ed01-47a9-88f1-562992bc1076", "external-id": "nsx-vlan-transportzone-968", "segmentation_id": 968, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11df5954-2f", "ovs_interfaceid": "11df5954-2f09-4c8f-bab8-a5b6740bd994", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.815849] env[69992]: DEBUG nova.compute.manager [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 899.829267] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896734, 'name': CreateVM_Task, 'duration_secs': 0.37271} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.829466] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 899.830442] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.830442] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.830888] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 899.833827] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e24a806a-7572-4f2a-83f6-c8d9e393df19 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.841192] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 899.841192] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52ad776f-df11-85ab-8ea8-8c43272d91a5" [ 899.841192] env[69992]: _type = "Task" [ 899.841192] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.851356] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ad776f-df11-85ab-8ea8-8c43272d91a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.891660] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896735, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.898305] env[69992]: DEBUG nova.network.neutron [-] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.123223] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbb1f835-8856-4a2a-a58a-05a9655a3f83 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.131284] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f287c709-3a0c-4ffd-9692-07516d6bcd01 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.170238] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d9776e1-20ac-46ed-a6e3-895ad14bac41 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.178409] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e08fdf-e2ee-4844-8b46-2a6faab0b06a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.191834] env[69992]: DEBUG nova.compute.provider_tree [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.241361] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5281001e-13f1-6ad1-2d26-bf732dc13de4, 'name': SearchDatastore_Task, 'duration_secs': 0.066162} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.242239] env[69992]: DEBUG nova.network.neutron [req-af21c2d5-67f1-4238-982b-b42352cf5a16 req-60c809ec-1742-44aa-8e4f-f8eda1e6e153 service nova] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Updated VIF entry in instance network info cache for port aeab7334-78de-4ade-9c52-d77911f831fb. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 900.242556] env[69992]: DEBUG nova.network.neutron [req-af21c2d5-67f1-4238-982b-b42352cf5a16 req-60c809ec-1742-44aa-8e4f-f8eda1e6e153 service nova] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Updating instance_info_cache with network_info: [{"id": "aeab7334-78de-4ade-9c52-d77911f831fb", "address": "fa:16:3e:85:cf:90", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.108", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaeab7334-78", "ovs_interfaceid": "aeab7334-78de-4ade-9c52-d77911f831fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.245395] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aaa37d10-4bda-4059-9d2f-84b3f1975a18 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.251708] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 900.251708] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]521907b7-8c07-8ede-de12-10f70ee0b5e1" [ 900.251708] env[69992]: _type = "Task" [ 900.251708] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.256444] env[69992]: DEBUG oslo_concurrency.lockutils [req-a346af1b-9971-4da3-83a3-c56a58e2f184 req-f34b86a7-fa8c-43d3-8c70-e976ce9eda60 service nova] Releasing lock "refresh_cache-a49b4721-e338-4e60-b91e-137caa3c9c03" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.261075] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521907b7-8c07-8ede-de12-10f70ee0b5e1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.297718] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3727e59b-855c-4eb6-9d21-3eccc410a94b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.317063] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Updating instance '1d436762-964d-40d9-871e-ee33c3ba25b5' progress to 0 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 900.334983] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.352680] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ad776f-df11-85ab-8ea8-8c43272d91a5, 'name': SearchDatastore_Task, 'duration_secs': 0.033353} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.352930] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.353048] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 900.353315] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.353463] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.353636] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 
tempest-ListImageFiltersTestJSON-706051381-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 900.353888] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9d9e7ed1-61ef-40b2-a257-b0f8785d7817 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.362495] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 900.362708] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 900.363413] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa29c246-9feb-4f7f-8f80-44c13bda2e33 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.368750] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 900.368750] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52e1393a-a20f-293e-bd6d-02c7ef02799a" [ 900.368750] env[69992]: _type = "Task" [ 900.368750] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.376390] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e1393a-a20f-293e-bd6d-02c7ef02799a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.390062] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896735, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.54195} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.390062] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] a49b4721-e338-4e60-b91e-137caa3c9c03/a49b4721-e338-4e60-b91e-137caa3c9c03.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 900.390062] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 900.390062] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-71b13ba6-7fc4-480e-9132-b9f0299d73e5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.395359] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 900.395359] env[69992]: value = "task-2896736" [ 900.395359] env[69992]: _type = "Task" [ 900.395359] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.400378] env[69992]: INFO nova.compute.manager [-] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Took 1.27 seconds to deallocate network for instance. [ 900.407438] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896736, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.439831] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-48b1c29a-564f-428f-9d13-a7cdadc14714 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 900.440174] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef75c958-e5a1-49eb-bf61-23df135691d9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.447066] env[69992]: DEBUG oslo_vmware.api [None req-48b1c29a-564f-428f-9d13-a7cdadc14714 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 900.447066] env[69992]: value = "task-2896737" [ 900.447066] env[69992]: _type = "Task" [ 900.447066] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.458457] env[69992]: DEBUG oslo_vmware.api [None req-48b1c29a-564f-428f-9d13-a7cdadc14714 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896737, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.699267] env[69992]: DEBUG nova.scheduler.client.report [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 900.747798] env[69992]: DEBUG oslo_concurrency.lockutils [req-af21c2d5-67f1-4238-982b-b42352cf5a16 req-60c809ec-1742-44aa-8e4f-f8eda1e6e153 service nova] Releasing lock "refresh_cache-73e41918-88b8-4ff7-9fdd-b45ac97c80ec" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.764721] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521907b7-8c07-8ede-de12-10f70ee0b5e1, 'name': SearchDatastore_Task, 'duration_secs': 0.009941} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.765039] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.765317] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 27580836-7ab5-4e64-a985-3e6fc22a8b77/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk. {{(pid=69992) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 900.765481] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-25345a0c-ffba-4085-98ea-15f9f683aeb9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.775140] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 900.775140] env[69992]: value = "task-2896738" [ 900.775140] env[69992]: _type = "Task" [ 900.775140] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.788362] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896738, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.825212] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 900.825528] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c59f8452-289a-45a1-a0d9-d45e8cda7567 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.834685] env[69992]: DEBUG oslo_vmware.api [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 900.834685] env[69992]: value = "task-2896739" [ 900.834685] env[69992]: _type = "Task" [ 900.834685] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.847113] env[69992]: DEBUG oslo_vmware.api [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896739, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.880801] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e1393a-a20f-293e-bd6d-02c7ef02799a, 'name': SearchDatastore_Task, 'duration_secs': 0.008026} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.881877] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81ca648f-f6b6-45c5-8dc9-fa827353ff3b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.888227] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 900.888227] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52964704-2b43-e2c4-3910-6232193bd139" [ 900.888227] env[69992]: _type = "Task" [ 900.888227] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.900544] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52964704-2b43-e2c4-3910-6232193bd139, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.910211] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896736, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06995} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.911493] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.911493] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 900.912869] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2abdc9ac-1474-4ae9-bf9d-c64201d3f886 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.941184] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] a49b4721-e338-4e60-b91e-137caa3c9c03/a49b4721-e338-4e60-b91e-137caa3c9c03.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 900.941678] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d34b9732-2c04-4930-9537-fb17ea586d2b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.971018] env[69992]: DEBUG oslo_vmware.api [None req-48b1c29a-564f-428f-9d13-a7cdadc14714 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896737, 'name': PowerOffVM_Task, 'duration_secs': 0.388912} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.972532] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-48b1c29a-564f-428f-9d13-a7cdadc14714 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 900.972768] env[69992]: DEBUG nova.compute.manager [None req-48b1c29a-564f-428f-9d13-a7cdadc14714 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 900.973249] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 900.973249] env[69992]: value = "task-2896740" [ 900.973249] env[69992]: _type = "Task" [ 900.973249] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.974207] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6473d4ab-c13f-4298-b136-eaf30db0dba5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.991461] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896740, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.210081] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.650s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.210081] env[69992]: DEBUG nova.compute.manager [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 901.213326] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.756s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.213801] env[69992]: DEBUG nova.objects.instance [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Lazy-loading 'resources' on Instance uuid f249c0b9-ddd7-4b63-ae3a-11035764d3e5 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 901.292750] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896738, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.348550] env[69992]: DEBUG oslo_vmware.api [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896739, 'name': PowerOffVM_Task, 'duration_secs': 0.267452} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.348550] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 901.348838] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Updating instance '1d436762-964d-40d9-871e-ee33c3ba25b5' progress to 17 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 901.401024] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52964704-2b43-e2c4-3910-6232193bd139, 'name': SearchDatastore_Task, 'duration_secs': 0.013071} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.401024] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.401024] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 73e41918-88b8-4ff7-9fdd-b45ac97c80ec/73e41918-88b8-4ff7-9fdd-b45ac97c80ec.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 901.401024] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f23cdc6c-3f61-481d-ab81-c7a2a4cf2dd9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.408166] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 901.408166] env[69992]: value = "task-2896741" [ 901.408166] env[69992]: _type = "Task" [ 901.408166] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.420021] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896741, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.489763] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896740, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.495074] env[69992]: DEBUG oslo_concurrency.lockutils [None req-48b1c29a-564f-428f-9d13-a7cdadc14714 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "9bab6bf7-43c8-4cc3-b484-4472f1acdf45" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.073s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.717064] env[69992]: DEBUG nova.compute.utils [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 901.721734] env[69992]: DEBUG nova.compute.manager [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 901.721993] env[69992]: DEBUG nova.network.neutron [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 901.788021] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896738, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.710636} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.791427] env[69992]: INFO nova.virt.vmwareapi.ds_util [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 27580836-7ab5-4e64-a985-3e6fc22a8b77/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk. 
[ 901.792672] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2590bf62-ba2b-42c5-919b-f8b4216d982b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.843591] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] 27580836-7ab5-4e64-a985-3e6fc22a8b77/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 901.846449] env[69992]: DEBUG nova.policy [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '044902c6075d41739188628ba5ebd58d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b51b8195c4e7418cbdaa66aa5e5aff5b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 901.853012] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7e62676-a66b-4135-a159-d34fbdd6f300 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.882703] env[69992]: DEBUG nova.virt.hardware [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 901.883136] env[69992]: DEBUG nova.virt.hardware [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 901.883774] env[69992]: DEBUG nova.virt.hardware [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 901.883774] env[69992]: DEBUG nova.virt.hardware [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Flavor pref 0:0:0 {{(pid=69992) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 901.883977] env[69992]: DEBUG nova.virt.hardware [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 901.884246] env[69992]: DEBUG nova.virt.hardware [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 901.884576] env[69992]: DEBUG nova.virt.hardware [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 901.885020] env[69992]: DEBUG nova.virt.hardware [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 901.886775] env[69992]: DEBUG nova.virt.hardware [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 901.886775] env[69992]: DEBUG nova.virt.hardware [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 901.886775] env[69992]: DEBUG nova.virt.hardware [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 901.899010] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a87a4c79-ba35-467e-ab98-1c6ed6d1d8a6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.924783] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 901.924783] env[69992]: value = "task-2896743" [ 901.924783] env[69992]: _type = "Task" [ 901.924783] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.927023] env[69992]: DEBUG oslo_vmware.api [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 901.927023] env[69992]: value = "task-2896744" [ 901.927023] env[69992]: _type = "Task" [ 901.927023] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.947076] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896741, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.956086] env[69992]: DEBUG oslo_vmware.api [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896744, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.961529] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896743, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.995024] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896740, 'name': ReconfigVM_Task, 'duration_secs': 0.60688} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.995024] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Reconfigured VM instance instance-00000013 to attach disk [datastore1] a49b4721-e338-4e60-b91e-137caa3c9c03/a49b4721-e338-4e60-b91e-137caa3c9c03.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 901.995906] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa6f1288-d72a-450e-b68a-5f1c7f1ba95b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.005042] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 902.005042] env[69992]: value = "task-2896745" [ 902.005042] env[69992]: _type = "Task" [ 902.005042] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.015809] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896745, 'name': Rename_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.098794] env[69992]: DEBUG nova.compute.manager [req-ffd333cd-ae6a-49f1-a351-ac3665e1eb4a req-db008552-0860-4e89-8235-52d81f319e19 service nova] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Received event network-vif-deleted-3ab410e0-7643-4dc4-b15e-ca8b2701aefa {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 902.222220] env[69992]: DEBUG nova.compute.manager [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 902.353966] env[69992]: DEBUG nova.compute.manager [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 902.355557] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d18fe8e2-2d3f-4b9b-91d7-c72301e767e9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.398953] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-883c7406-357f-4d63-97f4-c452a0ae3e75 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.406892] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b61fc0-b7f0-48f4-9761-aade7e7f074c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.449972] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b50d1e0f-c0cb-420a-97e7-06d7a6d25a72 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.458193] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896741, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.63471} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.465595] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 73e41918-88b8-4ff7-9fdd-b45ac97c80ec/73e41918-88b8-4ff7-9fdd-b45ac97c80ec.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 902.465828] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 902.466192] env[69992]: DEBUG oslo_vmware.api [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896744, 'name': ReconfigVM_Task, 'duration_secs': 0.313242} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.466411] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896743, 'name': ReconfigVM_Task, 'duration_secs': 0.386828} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.466598] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2ec34e19-b7b9-469f-83b1-36836b1216b2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.469781] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4320f32-48a0-48fc-b460-122c314d9341 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.475811] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Updating instance '1d436762-964d-40d9-871e-ee33c3ba25b5' progress to 33 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 902.483022] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Reconfigured VM instance instance-00000010 to attach disk [datastore1] 27580836-7ab5-4e64-a985-3e6fc22a8b77/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 902.483022] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a2d362-9272-43b8-a6f0-e3cfc701c07b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.493631] 
env[69992]: DEBUG nova.compute.provider_tree [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 902.521304] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 902.521304] env[69992]: value = "task-2896746" [ 902.521304] env[69992]: _type = "Task" [ 902.521304] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.521304] env[69992]: DEBUG nova.scheduler.client.report [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 902.526608] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f039d31-a475-474f-8f16-b4599232db8b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.548572] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896745, 'name': Rename_Task, 'duration_secs': 0.154264} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.552500] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 902.553126] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896746, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.553490] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 902.553490] env[69992]: value = "task-2896747" [ 902.553490] env[69992]: _type = "Task" [ 902.553490] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.553642] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-07c59555-75b3-401f-ac7f-980ba758de08 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.564466] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896747, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.565803] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 902.565803] env[69992]: value = "task-2896748" [ 902.565803] env[69992]: _type = "Task" [ 902.565803] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.574227] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896748, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.668580] env[69992]: DEBUG nova.network.neutron [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Successfully created port: 369bb007-f861-4b94-a5ac-dd9d835b0fb2 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 902.874205] env[69992]: INFO nova.compute.manager [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] instance snapshotting [ 902.879402] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1049104-2e3f-4037-a2e6-ce57b5a2b68a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.903357] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d783b9-58f3-4db2-affe-6eda223bb9a2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.983518] env[69992]: DEBUG nova.virt.hardware [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow 
threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 902.983518] env[69992]: DEBUG nova.virt.hardware [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 902.983743] env[69992]: DEBUG nova.virt.hardware [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 902.984037] env[69992]: DEBUG nova.virt.hardware [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 902.984272] env[69992]: DEBUG nova.virt.hardware [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 902.984516] env[69992]: DEBUG nova.virt.hardware [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 902.984863] env[69992]: DEBUG nova.virt.hardware [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 902.985122] env[69992]: DEBUG nova.virt.hardware [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 902.985434] env[69992]: DEBUG nova.virt.hardware [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 902.985707] env[69992]: DEBUG nova.virt.hardware [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 902.986031] env[69992]: DEBUG nova.virt.hardware [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 902.993121] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 
tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Reconfiguring VM instance instance-00000009 to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 902.993879] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1424b81f-28b3-4415-bcf9-14597c253613 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.015136] env[69992]: DEBUG oslo_vmware.api [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 903.015136] env[69992]: value = "task-2896749" [ 903.015136] env[69992]: _type = "Task" [ 903.015136] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.027180] env[69992]: DEBUG oslo_vmware.api [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896749, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.043904] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.830s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 903.046985] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.553s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 903.047274] env[69992]: DEBUG nova.objects.instance [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Lazy-loading 'resources' on Instance uuid 068507bb-ee7a-44f7-b315-7d4b2b70e735 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 903.054053] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896746, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076827} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.054384] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 903.056138] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc3c791-e945-4a7c-9014-2524e7e9f36d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.083623] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] 73e41918-88b8-4ff7-9fdd-b45ac97c80ec/73e41918-88b8-4ff7-9fdd-b45ac97c80ec.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 903.086451] env[69992]: INFO nova.scheduler.client.report [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Deleted allocations for instance f249c0b9-ddd7-4b63-ae3a-11035764d3e5 [ 903.093985] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-995f9714-c6f8-40e9-a0e6-3c639c51c5d0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.108948] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896747, 'name': ReconfigVM_Task, 'duration_secs': 0.218722} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.111874] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 903.116247] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da941606-085e-4775-bcf6-4cbace9108d1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.119510] env[69992]: DEBUG oslo_vmware.api [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896748, 'name': PowerOnVM_Task, 'duration_secs': 0.465041} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.121173] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 903.121277] env[69992]: INFO nova.compute.manager [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Took 10.28 seconds to spawn the instance on the hypervisor. [ 903.122123] env[69992]: DEBUG nova.compute.manager [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 903.122123] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 903.122123] env[69992]: value = "task-2896750" [ 903.122123] env[69992]: _type = "Task" [ 903.122123] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.122812] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1314f0c4-08a8-4b32-95f2-e35d8f78ec57 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.129256] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 903.129256] env[69992]: value = "task-2896751" [ 903.129256] env[69992]: _type = "Task" [ 903.129256] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.141979] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896750, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.147780] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896751, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.240714] env[69992]: DEBUG nova.compute.manager [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 903.269689] env[69992]: DEBUG nova.virt.hardware [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 903.269985] env[69992]: DEBUG nova.virt.hardware [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 903.270171] env[69992]: DEBUG nova.virt.hardware [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 903.270359] env[69992]: DEBUG nova.virt.hardware [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 903.270502] env[69992]: DEBUG nova.virt.hardware [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 903.270645] env[69992]: DEBUG nova.virt.hardware [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 903.270890] env[69992]: DEBUG nova.virt.hardware [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 903.271024] env[69992]: DEBUG nova.virt.hardware [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 903.272064] env[69992]: DEBUG 
nova.virt.hardware [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 903.272064] env[69992]: DEBUG nova.virt.hardware [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 903.272064] env[69992]: DEBUG nova.virt.hardware [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 903.272371] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d472424-d8ed-4fed-87f7-0c468ad33570 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.284342] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31ffc036-dda6-45c4-af9d-df889fdcbbbe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.417013] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Creating Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 903.417356] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0405f215-d7da-4f20-b8d1-bc7a37b4d961 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.424937] env[69992]: DEBUG oslo_vmware.api [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Waiting for the task: (returnval){ [ 903.424937] env[69992]: value = "task-2896752" [ 903.424937] env[69992]: _type = "Task" [ 903.424937] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.443141] env[69992]: DEBUG oslo_vmware.api [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896752, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.526453] env[69992]: DEBUG oslo_vmware.api [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896749, 'name': ReconfigVM_Task, 'duration_secs': 0.189515} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.526885] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Reconfigured VM instance instance-00000009 to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 903.527837] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddfe81f8-d518-40f0-b628-2b8ce6528a5c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.556197] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 1d436762-964d-40d9-871e-ee33c3ba25b5/1d436762-964d-40d9-871e-ee33c3ba25b5.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 903.559732] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f18424e3-1497-484a-b57a-ff424e06653d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.582019] env[69992]: DEBUG oslo_vmware.api [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 903.582019] env[69992]: value = "task-2896754" [ 903.582019] env[69992]: _type = "Task" [ 903.582019] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.591478] env[69992]: DEBUG oslo_vmware.api [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896754, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.624204] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2dfc96b3-bae5-47c0-a393-41d32a99c73e tempest-ServerDiagnosticsV248Test-1834735610 tempest-ServerDiagnosticsV248Test-1834735610-project-member] Lock "f249c0b9-ddd7-4b63-ae3a-11035764d3e5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.599s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 903.654028] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896750, 'name': ReconfigVM_Task, 'duration_secs': 0.296615} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.654221] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896751, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.656686] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Reconfigured VM instance instance-00000014 to attach disk [datastore2] 73e41918-88b8-4ff7-9fdd-b45ac97c80ec/73e41918-88b8-4ff7-9fdd-b45ac97c80ec.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 903.660564] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-34eb4ee5-b9c4-4ded-8d66-8ef30e68885e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.664268] env[69992]: INFO nova.compute.manager [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Took 29.81 seconds to build instance. [ 903.670549] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 903.670549] env[69992]: value = "task-2896755" [ 903.670549] env[69992]: _type = "Task" [ 903.670549] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.693182] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896755, 'name': Rename_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.938464] env[69992]: DEBUG oslo_vmware.api [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896752, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.099970] env[69992]: DEBUG oslo_vmware.api [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896754, 'name': ReconfigVM_Task, 'duration_secs': 0.296634} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.100272] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 1d436762-964d-40d9-871e-ee33c3ba25b5/1d436762-964d-40d9-871e-ee33c3ba25b5.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 904.100756] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Updating instance '1d436762-964d-40d9-871e-ee33c3ba25b5' progress to 50 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 904.144379] env[69992]: DEBUG oslo_vmware.api [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896751, 'name': PowerOnVM_Task, 'duration_secs': 0.798062} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.144878] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 904.149758] env[69992]: DEBUG nova.compute.manager [None req-f0cb14d5-6be7-4b9f-85fb-e3694ba9c5e5 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 904.151103] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3d7872-2eff-4913-b257-8c0b1da7bb79 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.168022] env[69992]: DEBUG nova.compute.manager [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 904.168022] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ce086fb1-1abb-4eb5-b230-a31bee370796 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "a49b4721-e338-4e60-b91e-137caa3c9c03" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.921s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.168022] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d41f0c-5228-494b-9745-91ed7131af7a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.189597] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 
tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896755, 'name': Rename_Task, 'duration_secs': 0.153273} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.189707] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 904.190337] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-975ecc51-0824-4d4d-a87f-27307d1343c2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.199872] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 904.199872] env[69992]: value = "task-2896756" [ 904.199872] env[69992]: _type = "Task" [ 904.199872] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.213414] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896756, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.215168] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e53f03-9ca2-459c-ae3a-6f48c9e054c7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.230511] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f429d8-88b6-4b24-9335-6d65403ce3c5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.269170] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b8d7d4-63be-4860-ad0d-a94a74c8690c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.279469] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fabc62a-a8fe-4847-a4fa-6f3e766d4bc4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.300517] env[69992]: DEBUG nova.compute.provider_tree [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 904.439048] env[69992]: DEBUG oslo_vmware.api [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896752, 'name': CreateSnapshot_Task, 'duration_secs': 0.876007} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.439415] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Created Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 904.440445] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b5440e-a94a-4a27-812e-49ba704d208e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.613650] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9491e73b-9f79-4e38-b3bf-ec8e2e04e12f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.638351] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe956af-f694-4a5e-a581-cab2c48bf61d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.677398] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Updating instance '1d436762-964d-40d9-871e-ee33c3ba25b5' progress to 67 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 904.682383] env[69992]: DEBUG nova.compute.manager [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 904.689774] env[69992]: INFO nova.compute.manager [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] instance snapshotting [ 904.689939] env[69992]: WARNING nova.compute.manager [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 904.692833] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281a60be-b99b-4b62-a69c-c2dcecdec405 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.722967] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e083aa-f37d-4362-8e56-76be695dbeb9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.750347] env[69992]: DEBUG oslo_vmware.api [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896756, 'name': PowerOnVM_Task, 'duration_secs': 0.503244} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.753079] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 904.753079] env[69992]: INFO nova.compute.manager [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Took 9.09 seconds to spawn the instance on the hypervisor. [ 904.753079] env[69992]: DEBUG nova.compute.manager [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 904.753079] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47250f94-162f-4ee3-8459-bbab7e36eb2e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.807064] env[69992]: DEBUG nova.scheduler.client.report [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 904.827892] env[69992]: DEBUG nova.network.neutron [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Successfully updated port: 369bb007-f861-4b94-a5ac-dd9d835b0fb2 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 904.966200] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Creating linked-clone VM from snapshot {{(pid=69992) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 904.966500] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-de949b99-8ee2-44b4-8932-af05523a9f33 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.978087] env[69992]: DEBUG oslo_vmware.api [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Waiting for the task: (returnval){ [ 904.978087] env[69992]: value = "task-2896757" [ 904.978087] env[69992]: _type = "Task" [ 904.978087] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.989314] env[69992]: DEBUG oslo_vmware.api [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896757, 'name': CloneVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.214234] env[69992]: DEBUG nova.compute.manager [req-f3ff2283-1aea-4c94-b8f0-cb0aab7a150f req-35e35ef4-4edc-4f4e-8ae0-f2c18c7dacfb service nova] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Received event network-vif-plugged-369bb007-f861-4b94-a5ac-dd9d835b0fb2 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 905.214234] env[69992]: DEBUG oslo_concurrency.lockutils [req-f3ff2283-1aea-4c94-b8f0-cb0aab7a150f req-35e35ef4-4edc-4f4e-8ae0-f2c18c7dacfb service nova] Acquiring lock "00b2fd0b-7841-448d-82cf-436aa8d80cda-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.214234] env[69992]: DEBUG oslo_concurrency.lockutils [req-f3ff2283-1aea-4c94-b8f0-cb0aab7a150f req-35e35ef4-4edc-4f4e-8ae0-f2c18c7dacfb service nova] Lock "00b2fd0b-7841-448d-82cf-436aa8d80cda-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.215166] env[69992]: DEBUG oslo_concurrency.lockutils [req-f3ff2283-1aea-4c94-b8f0-cb0aab7a150f req-35e35ef4-4edc-4f4e-8ae0-f2c18c7dacfb service nova] Lock "00b2fd0b-7841-448d-82cf-436aa8d80cda-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.215402] env[69992]: DEBUG nova.compute.manager [req-f3ff2283-1aea-4c94-b8f0-cb0aab7a150f req-35e35ef4-4edc-4f4e-8ae0-f2c18c7dacfb service nova] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] No waiting events found dispatching network-vif-plugged-369bb007-f861-4b94-a5ac-dd9d835b0fb2 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 905.215623] env[69992]: WARNING nova.compute.manager [req-f3ff2283-1aea-4c94-b8f0-cb0aab7a150f req-35e35ef4-4edc-4f4e-8ae0-f2c18c7dacfb service nova] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Received unexpected event network-vif-plugged-369bb007-f861-4b94-a5ac-dd9d835b0fb2 for instance with vm_state building and task_state spawning. 
[ 905.217273] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.255572] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Creating Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 905.258016] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ab5d48d9-ba1c-4161-afb5-7e91d877ce29 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.275938] env[69992]: DEBUG nova.network.neutron [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Port 2584dc71-913f-4c9b-922c-f8b28530b82f binding to destination host cpu-1 is already ACTIVE {{(pid=69992) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 905.279115] env[69992]: DEBUG oslo_vmware.api [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 905.279115] env[69992]: value = "task-2896758" [ 905.279115] env[69992]: _type = "Task" [ 905.279115] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.279873] env[69992]: INFO nova.compute.manager [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Took 30.02 seconds to build instance. [ 905.293526] env[69992]: DEBUG oslo_vmware.api [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896758, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.312298] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.265s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.315295] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.564s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.316640] env[69992]: INFO nova.compute.claims [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 905.330791] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "refresh_cache-00b2fd0b-7841-448d-82cf-436aa8d80cda" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.330976] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "refresh_cache-00b2fd0b-7841-448d-82cf-436aa8d80cda" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 905.331243] env[69992]: DEBUG nova.network.neutron [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 905.351030] env[69992]: INFO nova.scheduler.client.report [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Deleted allocations for instance 068507bb-ee7a-44f7-b315-7d4b2b70e735 [ 905.490978] env[69992]: DEBUG oslo_vmware.api [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896757, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.785591] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5813d134-992e-43c4-b810-d352ce7e3244 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lock "73e41918-88b8-4ff7-9fdd-b45ac97c80ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.681s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.798975] env[69992]: DEBUG oslo_vmware.api [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896758, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.866082] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cc43c6d2-ddcc-4cc4-bbd3-2920bf00fb16 tempest-ServerPasswordTestJSON-1823579092 tempest-ServerPasswordTestJSON-1823579092-project-member] Lock "068507bb-ee7a-44f7-b315-7d4b2b70e735" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.319s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.952958] env[69992]: DEBUG nova.network.neutron [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 905.956235] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "bcb5131c-b2c6-4971-8a2e-4fcd7133442d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.957574] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "bcb5131c-b2c6-4971-8a2e-4fcd7133442d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.003486] env[69992]: DEBUG oslo_vmware.api [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896757, 'name': CloneVM_Task} progress is 95%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.306045] env[69992]: DEBUG nova.compute.manager [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 906.315075] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "1d436762-964d-40d9-871e-ee33c3ba25b5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.315075] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "1d436762-964d-40d9-871e-ee33c3ba25b5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.315679] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "1d436762-964d-40d9-871e-ee33c3ba25b5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.323896] env[69992]: DEBUG oslo_vmware.api [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896758, 'name': CreateSnapshot_Task, 'duration_secs': 0.55466} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.326249] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Created Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 906.326249] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b004be8-bd66-4891-a2f1-ed4969d66cbf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.502873] env[69992]: DEBUG oslo_vmware.api [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896757, 'name': CloneVM_Task, 'duration_secs': 1.30647} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.502873] env[69992]: INFO nova.virt.vmwareapi.vmops [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Created linked-clone VM from snapshot [ 906.502873] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5726bf32-7ede-4e9c-b3f2-fe25d22ba4aa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.515076] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Uploading image 2b0409f0-6588-4643-b677-dfab84d8e552 {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 906.522265] env[69992]: DEBUG nova.network.neutron [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Updating instance_info_cache with network_info: [{"id": "369bb007-f861-4b94-a5ac-dd9d835b0fb2", "address": "fa:16:3e:ab:de:fd", "network": {"id": "0655b1a1-e1ee-4efd-889c-0f72915310f5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1445509008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51b8195c4e7418cbdaa66aa5e5aff5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap369bb007-f8", "ovs_interfaceid": "369bb007-f861-4b94-a5ac-dd9d835b0fb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.561377] env[69992]: DEBUG oslo_vmware.rw_handles [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 906.561377] env[69992]: value = "vm-581886" [ 906.561377] env[69992]: _type = "VirtualMachine" [ 906.561377] env[69992]: }. 
{{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 906.561377] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-bdcf3be8-dea7-4c24-8779-1a2d0b78ef18 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.569627] env[69992]: DEBUG oslo_vmware.rw_handles [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Lease: (returnval){ [ 906.569627] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d5c630-c5fa-6aa0-96b1-7da2cd09ab69" [ 906.569627] env[69992]: _type = "HttpNfcLease" [ 906.569627] env[69992]: } obtained for exporting VM: (result){ [ 906.569627] env[69992]: value = "vm-581886" [ 906.569627] env[69992]: _type = "VirtualMachine" [ 906.569627] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 906.569627] env[69992]: DEBUG oslo_vmware.api [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Waiting for the lease: (returnval){ [ 906.569627] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d5c630-c5fa-6aa0-96b1-7da2cd09ab69" [ 906.569627] env[69992]: _type = "HttpNfcLease" [ 906.569627] env[69992]: } to be ready. {{(pid=69992) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 906.581983] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 906.581983] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d5c630-c5fa-6aa0-96b1-7da2cd09ab69" [ 906.581983] env[69992]: _type = "HttpNfcLease" [ 906.581983] env[69992]: } is initializing. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 906.831616] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.854322] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Creating linked-clone VM from snapshot {{(pid=69992) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 906.854617] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e2b4304f-f782-43e6-899b-ef388de64c70 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.866590] env[69992]: DEBUG oslo_vmware.api [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 906.866590] env[69992]: value = "task-2896761" [ 906.866590] env[69992]: _type = "Task" [ 906.866590] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.875041] env[69992]: DEBUG oslo_vmware.api [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896761, 'name': CloneVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.876725] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd447aa5-f633-45be-b269-579dc0fc162e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.887393] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9090fe13-2f11-499b-b60d-2d96f5e1e3d1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.922801] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1245b6-669a-4bd9-a9cf-372707562867 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.935914] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac0f6a3e-97fb-435d-949f-d698588ad445 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.958701] env[69992]: DEBUG nova.compute.provider_tree [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 907.025622] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "refresh_cache-00b2fd0b-7841-448d-82cf-436aa8d80cda" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 907.026118] env[69992]: DEBUG nova.compute.manager [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Instance network_info: |[{"id": "369bb007-f861-4b94-a5ac-dd9d835b0fb2", "address": "fa:16:3e:ab:de:fd", "network": {"id": "0655b1a1-e1ee-4efd-889c-0f72915310f5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1445509008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51b8195c4e7418cbdaa66aa5e5aff5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap369bb007-f8", 
"ovs_interfaceid": "369bb007-f861-4b94-a5ac-dd9d835b0fb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 907.026667] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:de:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '369bb007-f861-4b94-a5ac-dd9d835b0fb2', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 907.035885] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Creating folder: Project (b51b8195c4e7418cbdaa66aa5e5aff5b). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 907.037326] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0edcfe00-217f-467c-b79a-6065036f89c1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.053129] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Created folder: Project (b51b8195c4e7418cbdaa66aa5e5aff5b) in parent group-v581821. [ 907.053350] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Creating folder: Instances. Parent ref: group-v581889. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 907.053606] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2019111d-0879-409b-b903-eef1036faec9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.065705] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Created folder: Instances in parent group-v581889. [ 907.065967] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 907.066162] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 907.066807] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8dd7f0f6-f558-4307-b01a-9e95b75357fa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.091844] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 907.091844] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d5c630-c5fa-6aa0-96b1-7da2cd09ab69" [ 907.091844] env[69992]: _type = "HttpNfcLease" [ 907.091844] env[69992]: } is ready. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 907.093274] env[69992]: DEBUG oslo_vmware.rw_handles [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 907.093274] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d5c630-c5fa-6aa0-96b1-7da2cd09ab69" [ 907.093274] env[69992]: _type = "HttpNfcLease" [ 907.093274] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 907.093520] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 907.093520] env[69992]: value = "task-2896764" [ 907.093520] env[69992]: _type = "Task" [ 907.093520] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.094230] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4963dbcb-3508-40b8-987c-2f4e51f51bab {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.109770] env[69992]: DEBUG oslo_vmware.rw_handles [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524c9200-e3ef-8aa6-83c8-26ad57a036dc/disk-0.vmdk from lease info. {{(pid=69992) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 907.109993] env[69992]: DEBUG oslo_vmware.rw_handles [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524c9200-e3ef-8aa6-83c8-26ad57a036dc/disk-0.vmdk for reading. {{(pid=69992) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 907.111759] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896764, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.172868] env[69992]: DEBUG oslo_concurrency.lockutils [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquiring lock "97cb6372-3f4e-427d-9509-7e6c43aa2e7b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.173135] env[69992]: DEBUG oslo_concurrency.lockutils [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lock "97cb6372-3f4e-427d-9509-7e6c43aa2e7b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.228127] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9028217d-8391-4df9-97a0-948d4f8c6a3e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.377338] env[69992]: DEBUG oslo_vmware.api [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896761, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.463225] env[69992]: DEBUG nova.scheduler.client.report [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 907.611328] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896764, 'name': CreateVM_Task, 'duration_secs': 0.464428} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.612363] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 907.612507] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.612749] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 907.613531] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 907.614540] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7fa95e7-6a02-4efb-938d-fc4e3caae76e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.622890] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 907.622890] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5294b642-ff7a-856e-bea8-58d71f195961" [ 907.622890] env[69992]: _type = "Task" [ 907.622890] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.634812] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5294b642-ff7a-856e-bea8-58d71f195961, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.667986] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "refresh_cache-1d436762-964d-40d9-871e-ee33c3ba25b5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.669185] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquired lock "refresh_cache-1d436762-964d-40d9-871e-ee33c3ba25b5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 907.669185] env[69992]: DEBUG nova.network.neutron [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 907.864803] env[69992]: DEBUG nova.compute.manager [req-77c19d6c-f902-4c4c-9b30-b79c3302ebe9 req-facb1433-0374-4ff3-baec-ac8de37f8a55 service nova] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Received event network-changed-369bb007-f861-4b94-a5ac-dd9d835b0fb2 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 907.869244] env[69992]: DEBUG nova.compute.manager [req-77c19d6c-f902-4c4c-9b30-b79c3302ebe9 req-facb1433-0374-4ff3-baec-ac8de37f8a55 service nova] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Refreshing instance network info cache due to event network-changed-369bb007-f861-4b94-a5ac-dd9d835b0fb2. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 907.870162] env[69992]: DEBUG oslo_concurrency.lockutils [req-77c19d6c-f902-4c4c-9b30-b79c3302ebe9 req-facb1433-0374-4ff3-baec-ac8de37f8a55 service nova] Acquiring lock "refresh_cache-00b2fd0b-7841-448d-82cf-436aa8d80cda" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.870162] env[69992]: DEBUG oslo_concurrency.lockutils [req-77c19d6c-f902-4c4c-9b30-b79c3302ebe9 req-facb1433-0374-4ff3-baec-ac8de37f8a55 service nova] Acquired lock "refresh_cache-00b2fd0b-7841-448d-82cf-436aa8d80cda" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 907.870281] env[69992]: DEBUG nova.network.neutron [req-77c19d6c-f902-4c4c-9b30-b79c3302ebe9 req-facb1433-0374-4ff3-baec-ac8de37f8a55 service nova] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Refreshing network info cache for port 369bb007-f861-4b94-a5ac-dd9d835b0fb2 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 907.884480] env[69992]: DEBUG oslo_vmware.api [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896761, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.971172] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.656s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.972462] env[69992]: DEBUG nova.compute.manager [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 907.976134] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.874s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.978935] env[69992]: INFO nova.compute.claims [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 908.138530] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5294b642-ff7a-856e-bea8-58d71f195961, 'name': SearchDatastore_Task, 'duration_secs': 0.015409} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.139970] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 908.140410] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 908.140775] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.141057] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 908.141389] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 908.141959] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83141242-de82-490d-b20b-bcdec0bf36b3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.155138] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 908.155410] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 908.160535] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20ffdd96-e4c8-489f-b8b3-75a70b38af85 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.168742] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 908.168742] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]521dcebc-51a9-862f-ebdd-d84d65a0dd63" [ 908.168742] env[69992]: _type = "Task" [ 908.168742] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.199498] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521dcebc-51a9-862f-ebdd-d84d65a0dd63, 'name': SearchDatastore_Task, 'duration_secs': 0.017107} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.201014] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5175e42e-50be-4426-9c18-3d2169537c73 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.218149] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 908.218149] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5201b1da-3cba-f5c4-5386-c8d95436bbee" [ 908.218149] env[69992]: _type = "Task" [ 908.218149] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.239207] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "a7f01cd7-f148-48fc-a71a-5461672d6039" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.244028] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "a7f01cd7-f148-48fc-a71a-5461672d6039" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.245845] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5201b1da-3cba-f5c4-5386-c8d95436bbee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.392749] env[69992]: DEBUG oslo_vmware.api [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896761, 'name': CloneVM_Task, 'duration_secs': 1.451754} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.392824] env[69992]: INFO nova.virt.vmwareapi.vmops [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Created linked-clone VM from snapshot [ 908.394478] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31369547-53b6-4889-9b74-4b1c7376f169 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.405494] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Uploading image 31ef2967-c992-4464-ba06-78513ddb83ff {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 908.436075] env[69992]: DEBUG oslo_vmware.rw_handles [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 908.436075] env[69992]: value = "vm-581888" [ 908.436075] env[69992]: _type = "VirtualMachine" [ 908.436075] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 908.436357] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4c8c94b6-8c1d-404b-99e2-6f8e93052d0d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.448652] env[69992]: DEBUG oslo_vmware.rw_handles [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lease: (returnval){ [ 908.448652] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528dec20-d8bc-876f-789b-85215e8d5324" [ 908.448652] env[69992]: _type = "HttpNfcLease" [ 908.448652] env[69992]: } obtained for exporting VM: (result){ [ 908.448652] env[69992]: value = "vm-581888" [ 908.448652] env[69992]: _type = "VirtualMachine" [ 908.448652] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 908.449362] env[69992]: DEBUG oslo_vmware.api [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the lease: (returnval){ [ 908.449362] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528dec20-d8bc-876f-789b-85215e8d5324" [ 908.449362] env[69992]: _type = "HttpNfcLease" [ 908.449362] env[69992]: } to be ready. 
{{(pid=69992) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 908.460698] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 908.460698] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528dec20-d8bc-876f-789b-85215e8d5324" [ 908.460698] env[69992]: _type = "HttpNfcLease" [ 908.460698] env[69992]: } is initializing. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 908.484913] env[69992]: DEBUG nova.compute.utils [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 908.489032] env[69992]: DEBUG nova.compute.manager [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 908.489186] env[69992]: DEBUG nova.network.neutron [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 908.717348] env[69992]: DEBUG nova.policy [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '730c31e7d7524a5d9d7d7e905871a5e7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '216d97a77775447d967e818418defd3c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 908.735095] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5201b1da-3cba-f5c4-5386-c8d95436bbee, 'name': SearchDatastore_Task, 'duration_secs': 0.018091} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.737537] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 908.737975] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 00b2fd0b-7841-448d-82cf-436aa8d80cda/00b2fd0b-7841-448d-82cf-436aa8d80cda.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 908.738292] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da4c152c-68b5-4a13-a20a-73170209a1e7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.749224] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 908.749224] env[69992]: value = "task-2896767" [ 908.749224] env[69992]: _type = "Task" [ 908.749224] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.763774] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896767, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.957579] env[69992]: DEBUG nova.network.neutron [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Updating instance_info_cache with network_info: [{"id": "2584dc71-913f-4c9b-922c-f8b28530b82f", "address": "fa:16:3e:42:68:21", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.191", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2584dc71-91", "ovs_interfaceid": "2584dc71-913f-4c9b-922c-f8b28530b82f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.965831] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 908.965831] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528dec20-d8bc-876f-789b-85215e8d5324" [ 908.965831] env[69992]: _type = "HttpNfcLease" [ 908.965831] env[69992]: } is ready. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 908.966581] env[69992]: DEBUG oslo_vmware.rw_handles [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 908.966581] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528dec20-d8bc-876f-789b-85215e8d5324" [ 908.966581] env[69992]: _type = "HttpNfcLease" [ 908.966581] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 908.967980] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb8cc9c4-313f-421c-b8fa-b101ff10a229 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.980292] env[69992]: DEBUG oslo_vmware.rw_handles [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529082c1-695c-12ba-f017-77f14d8dd347/disk-0.vmdk from lease info. 
{{(pid=69992) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 908.981895] env[69992]: DEBUG oslo_vmware.rw_handles [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529082c1-695c-12ba-f017-77f14d8dd347/disk-0.vmdk for reading. {{(pid=69992) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 909.073752] env[69992]: DEBUG nova.compute.manager [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 909.123476] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-74ad9a4a-db95-4afc-83cb-9e3bb8a47c51 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.250753] env[69992]: DEBUG nova.network.neutron [req-77c19d6c-f902-4c4c-9b30-b79c3302ebe9 req-facb1433-0374-4ff3-baec-ac8de37f8a55 service nova] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Updated VIF entry in instance network info cache for port 369bb007-f861-4b94-a5ac-dd9d835b0fb2. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 909.250753] env[69992]: DEBUG nova.network.neutron [req-77c19d6c-f902-4c4c-9b30-b79c3302ebe9 req-facb1433-0374-4ff3-baec-ac8de37f8a55 service nova] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Updating instance_info_cache with network_info: [{"id": "369bb007-f861-4b94-a5ac-dd9d835b0fb2", "address": "fa:16:3e:ab:de:fd", "network": {"id": "0655b1a1-e1ee-4efd-889c-0f72915310f5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1445509008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51b8195c4e7418cbdaa66aa5e5aff5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap369bb007-f8", "ovs_interfaceid": "369bb007-f861-4b94-a5ac-dd9d835b0fb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.266715] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896767, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.476080] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Releasing lock "refresh_cache-1d436762-964d-40d9-871e-ee33c3ba25b5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 909.759909] env[69992]: DEBUG oslo_concurrency.lockutils [req-77c19d6c-f902-4c4c-9b30-b79c3302ebe9 req-facb1433-0374-4ff3-baec-ac8de37f8a55 service nova] Releasing lock "refresh_cache-00b2fd0b-7841-448d-82cf-436aa8d80cda" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 909.765906] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e75afda-6f6e-4857-8960-8d800cf01184 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.774498] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896767, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.725375} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.775252] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 00b2fd0b-7841-448d-82cf-436aa8d80cda/00b2fd0b-7841-448d-82cf-436aa8d80cda.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 909.775826] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 909.775826] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4b40118e-845f-456c-a0d4-e181b945a4dd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.781783] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-572cf94c-0432-4106-8f40-d56718c262b5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.786880] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 909.786880] env[69992]: value = "task-2896768" [ 909.786880] env[69992]: _type = "Task" [ 909.786880] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.819175] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbfc6c1f-dcee-45f7-af3d-285d949f290e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.825715] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896768, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.832921] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7fdf248-9f58-4ee1-9e59-5958d0edcfe2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.851407] env[69992]: DEBUG nova.compute.provider_tree [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.011533] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041a61ec-d3e3-4e25-899e-deeb06d993fb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.041612] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a659bf-f871-4882-a21f-85918b5577a3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.052686] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Updating instance '1d436762-964d-40d9-871e-ee33c3ba25b5' progress to 83 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 910.091779] env[69992]: DEBUG nova.compute.manager [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 910.255329] env[69992]: DEBUG nova.network.neutron [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Successfully created port: d71a7a39-b1d8-4236-9da5-ebc02ecd90b6 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 910.301307] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896768, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073241} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.301307] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 910.302581] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b124d2-1811-416c-9cb3-bd577075b78f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.340265] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] 00b2fd0b-7841-448d-82cf-436aa8d80cda/00b2fd0b-7841-448d-82cf-436aa8d80cda.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 910.340265] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98398aac-8b49-438b-9aed-f702c1ef22d2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.361365] env[69992]: DEBUG nova.scheduler.client.report [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 910.375028] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 910.375028] env[69992]: value = "task-2896770" [ 910.375028] env[69992]: _type = "Task" [ 910.375028] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.385867] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896770, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.559651] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 910.559651] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a13dac82-bf71-4a6c-bb4e-d24d469da232 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.569875] env[69992]: DEBUG oslo_vmware.api [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 910.569875] env[69992]: value = "task-2896771" [ 910.569875] env[69992]: _type = "Task" [ 910.569875] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.579736] env[69992]: DEBUG oslo_vmware.api [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896771, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.868925] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.892s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.868925] env[69992]: DEBUG nova.compute.manager [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 910.876062] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.572s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.876062] env[69992]: DEBUG nova.objects.instance [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Lazy-loading 'resources' on Instance uuid 64ab568c-a2ef-4bac-8885-3dde76f9f764 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 910.897412] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896770, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.085177] env[69992]: DEBUG oslo_vmware.api [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896771, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.377041] env[69992]: DEBUG nova.compute.utils [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 911.382271] env[69992]: DEBUG nova.compute.manager [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 911.382271] env[69992]: DEBUG nova.network.neutron [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 911.415656] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896770, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.464046] env[69992]: DEBUG nova.policy [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd65ae0068ef6437ab1af36384644513a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'da546e986828460e958e2eed165bf47e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 911.590932] env[69992]: DEBUG oslo_vmware.api [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896771, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.887178] env[69992]: DEBUG nova.compute.manager [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 911.904149] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896770, 'name': ReconfigVM_Task, 'duration_secs': 1.201083} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.904299] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Reconfigured VM instance instance-00000015 to attach disk [datastore2] 00b2fd0b-7841-448d-82cf-436aa8d80cda/00b2fd0b-7841-448d-82cf-436aa8d80cda.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 911.905655] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-354294fa-dab9-4ef6-a7fc-715271d00366 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.914785] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 911.914785] env[69992]: value = "task-2896772" [ 911.914785] env[69992]: _type = "Task" [ 911.914785] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.952762] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896772, 'name': Rename_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.085253] env[69992]: DEBUG oslo_vmware.api [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2896771, 'name': PowerOnVM_Task, 'duration_secs': 1.037971} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.085253] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 912.085253] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4c2c5203-cf37-4753-905e-ec325f2def4b tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Updating instance '1d436762-964d-40d9-871e-ee33c3ba25b5' progress to 100 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 912.174792] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c45d6da-3734-42ee-8a26-d7df15416ae3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.184770] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-363c050b-321d-4ccf-b146-78a08640aea2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.227346] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65853f1c-ce98-4c3f-bf3b-6efddeaea60d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.237386] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64a6bed-8ff5-4826-b0de-6803d9366cfd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.255413] env[69992]: DEBUG nova.compute.provider_tree [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.433167] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896772, 'name': Rename_Task, 'duration_secs': 0.239253} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.433501] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 912.433707] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7f19c912-c7f8-4aa7-b690-b771195f8b0c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.445600] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 912.445600] env[69992]: value = "task-2896774" [ 912.445600] env[69992]: _type = "Task" [ 912.445600] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.460346] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896774, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.461269] env[69992]: DEBUG nova.network.neutron [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Successfully created port: 59bdab57-cc8d-40d6-90b5-e7582a1f1500 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 912.504141] env[69992]: DEBUG nova.network.neutron [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Successfully updated port: d71a7a39-b1d8-4236-9da5-ebc02ecd90b6 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 912.760131] env[69992]: DEBUG nova.scheduler.client.report [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 912.905325] env[69992]: DEBUG nova.compute.manager [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 912.964850] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896774, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.008801] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquiring lock "refresh_cache-a29534bf-ee12-4b94-839b-4a12659ebd3b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.009148] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquired lock "refresh_cache-a29534bf-ee12-4b94-839b-4a12659ebd3b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 913.009389] env[69992]: DEBUG nova.network.neutron [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 913.271989] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.397s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.275545] env[69992]: DEBUG oslo_concurrency.lockutils [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.750s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.276065] env[69992]: DEBUG nova.objects.instance [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Lazy-loading 'resources' on Instance uuid 6c58c05e-9679-4e53-89e7-c7c9cb11cff0 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 913.303039] env[69992]: INFO nova.scheduler.client.report [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Deleted allocations for instance 64ab568c-a2ef-4bac-8885-3dde76f9f764 [ 913.459677] env[69992]: DEBUG oslo_vmware.api [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896774, 'name': PowerOnVM_Task, 'duration_secs': 0.824359} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.460012] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 913.460012] env[69992]: INFO nova.compute.manager [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Took 10.22 seconds to spawn the instance on the hypervisor. [ 913.460328] env[69992]: DEBUG nova.compute.manager [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 913.461164] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f200618-4fd1-4fce-a8c3-1713166fa016 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.547905] env[69992]: DEBUG nova.network.neutron [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 913.726813] env[69992]: DEBUG nova.network.neutron [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Updating instance_info_cache with network_info: [{"id": "d71a7a39-b1d8-4236-9da5-ebc02ecd90b6", "address": "fa:16:3e:50:f9:26", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd71a7a39-b1", "ovs_interfaceid": "d71a7a39-b1d8-4236-9da5-ebc02ecd90b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.812723] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bab0d6ba-5111-477f-a042-a19a4af07f20 tempest-FloatingIPsAssociationNegativeTestJSON-502020632 tempest-FloatingIPsAssociationNegativeTestJSON-502020632-project-member] Lock 
"64ab568c-a2ef-4bac-8885-3dde76f9f764" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.615s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.991616] env[69992]: INFO nova.compute.manager [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Took 35.63 seconds to build instance. [ 914.231072] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Releasing lock "refresh_cache-a29534bf-ee12-4b94-839b-4a12659ebd3b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.231072] env[69992]: DEBUG nova.compute.manager [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Instance network_info: |[{"id": "d71a7a39-b1d8-4236-9da5-ebc02ecd90b6", "address": "fa:16:3e:50:f9:26", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd71a7a39-b1", "ovs_interfaceid": "d71a7a39-b1d8-4236-9da5-ebc02ecd90b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 914.303490] env[69992]: DEBUG nova.network.neutron [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Successfully updated port: 59bdab57-cc8d-40d6-90b5-e7582a1f1500 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 914.408628] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a6b58c-3cdf-4d29-a71f-2cc75235453c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.421895] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97297417-58f5-435d-a9b8-f02afa91e591 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.467550] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea08981-2f46-445b-809a-4901ee2b4c6d {{(pid=69992) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.479897] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95bf968a-8f8d-4197-ad71-4c8dcfdac25d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.498434] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2d89594a-6c9c-4661-ab00-d21dad5652c8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "00b2fd0b-7841-448d-82cf-436aa8d80cda" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.624s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.499303] env[69992]: DEBUG nova.compute.provider_tree [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 914.814733] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquiring lock "refresh_cache-bf75484e-4020-48f7-9419-bd88d0462b90" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.814733] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquired lock "refresh_cache-bf75484e-4020-48f7-9419-bd88d0462b90" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.814733] env[69992]: DEBUG nova.network.neutron [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 915.003281] env[69992]: DEBUG nova.scheduler.client.report [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 915.006999] env[69992]: DEBUG nova.compute.manager [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Starting instance... 
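For reference, the usable capacity behind an inventory record like the one reported above works out as (total - reserved) * allocation_ratio per resource class. A quick illustrative check, not the placement service's actual code:

```python
# Quick arithmetic check, not placement-service code: effective capacity per
# resource class is (total - reserved) * allocation_ratio, with min_unit,
# max_unit and step_size constraining each individual allocation.
def capacity(total, reserved, allocation_ratio):
    return int((total - reserved) * allocation_ratio)

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, fields in inventory.items():
    print(rc, capacity(**fields))
# VCPU 192, MEMORY_MB 196078, DISK_GB 400
```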
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 915.281072] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 915.281422] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 915.352144] env[69992]: DEBUG nova.network.neutron [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 915.506053] env[69992]: DEBUG nova.network.neutron [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Updating instance_info_cache with network_info: [{"id": "59bdab57-cc8d-40d6-90b5-e7582a1f1500", "address": "fa:16:3e:e6:8b:79", "network": {"id": "3e77044c-b2d9-4469-8bae-4dbd1f752c9c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-482235377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da546e986828460e958e2eed165bf47e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59bdab57-cc", "ovs_interfaceid": "59bdab57-cc8d-40d6-90b5-e7582a1f1500", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.510980] env[69992]: DEBUG oslo_concurrency.lockutils [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.235s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.517706] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.059s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.518651] 
env[69992]: INFO nova.compute.claims [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 915.533535] env[69992]: INFO nova.scheduler.client.report [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Deleted allocations for instance 6c58c05e-9679-4e53-89e7-c7c9cb11cff0 [ 915.538914] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 915.790122] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 915.790495] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 915.790659] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 915.790912] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 915.791197] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 915.791417] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 915.791569] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] CONF.reclaim_instance_interval <= 0, skipping... 
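The "compute_resources" lock messages above ("Acquiring lock", "acquired ... waited 24.059s", "released ... held") come from oslo.concurrency's lockutils. A simplified illustration of that primitive, with placeholder names rather than Nova's real call sites:

```python
# Simplified illustration of the oslo.concurrency primitive behind the
# "Acquiring lock" / "acquired ... waited Ns" / "released ... held Ns"
# messages. The lock names and functions here are placeholders, not Nova's
# real call sites.
from oslo_concurrency import lockutils

synchronized = lockutils.synchronized_with_prefix('nova-')

@synchronized('compute_resources')
def instance_claim(instance_uuid):
    # Only one caller at a time gets past the decorator; time spent blocked
    # here is what the log reports as "waited N.NNNs".
    return {'claimed': instance_uuid}

# The same primitive is available as a context manager for ad-hoc names:
with lockutils.lock('refresh_cache-<instance-uuid>'):
    pass  # e.g. rebuild the network info cache while holding the lock
```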
{{(pid=69992) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 915.791717] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 916.009650] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Releasing lock "refresh_cache-bf75484e-4020-48f7-9419-bd88d0462b90" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.010101] env[69992]: DEBUG nova.compute.manager [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Instance network_info: |[{"id": "59bdab57-cc8d-40d6-90b5-e7582a1f1500", "address": "fa:16:3e:e6:8b:79", "network": {"id": "3e77044c-b2d9-4469-8bae-4dbd1f752c9c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-482235377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da546e986828460e958e2eed165bf47e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59bdab57-cc", "ovs_interfaceid": "59bdab57-cc8d-40d6-90b5-e7582a1f1500", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 916.042934] env[69992]: DEBUG oslo_concurrency.lockutils [None req-834c3049-3c95-48f5-900b-2bfac3d8784e tempest-ServerExternalEventsTest-1204883010 tempest-ServerExternalEventsTest-1204883010-project-member] Lock "6c58c05e-9679-4e53-89e7-c7c9cb11cff0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.696s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.263317] env[69992]: DEBUG nova.virt.hardware [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 916.263728] env[69992]: DEBUG nova.virt.hardware [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 916.263783] env[69992]: DEBUG nova.virt.hardware [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 916.263942] env[69992]: DEBUG nova.virt.hardware [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 916.264105] env[69992]: DEBUG nova.virt.hardware [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 916.264273] env[69992]: DEBUG nova.virt.hardware [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 916.264491] env[69992]: DEBUG nova.virt.hardware [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 916.264659] env[69992]: DEBUG nova.virt.hardware [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 916.264839] env[69992]: DEBUG nova.virt.hardware [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 916.265158] env[69992]: DEBUG nova.virt.hardware [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 
916.265240] env[69992]: DEBUG nova.virt.hardware [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 916.267448] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d11ae3-bc70-42eb-9d8f-3a03d1f635d6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.280828] env[69992]: DEBUG nova.virt.hardware [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 916.281091] env[69992]: DEBUG nova.virt.hardware [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 916.281258] env[69992]: DEBUG nova.virt.hardware [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 916.281430] env[69992]: DEBUG nova.virt.hardware [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 916.281574] env[69992]: DEBUG nova.virt.hardware [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 916.281743] env[69992]: DEBUG nova.virt.hardware [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 916.281911] env[69992]: DEBUG nova.virt.hardware [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 
tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 916.282087] env[69992]: DEBUG nova.virt.hardware [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 916.282292] env[69992]: DEBUG nova.virt.hardware [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 916.282417] env[69992]: DEBUG nova.virt.hardware [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 916.282625] env[69992]: DEBUG nova.virt.hardware [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 916.284674] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21e33b18-2784-4bb9-8bdc-b2f084c3624f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.291126] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ece4a2-c743-4526-9fe7-72a85d412827 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.298696] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.300795] env[69992]: DEBUG oslo_vmware.rw_handles [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524c9200-e3ef-8aa6-83c8-26ad57a036dc/disk-0.vmdk. 
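The topology walk above (flavor m1.nano with 1 vCPU, flavor and image limits of 0:0:0 meaning unset, maximum 65536:65536:65536) necessarily ends at 1:1:1, since that is the only sockets * cores * threads factorization of a single vCPU. A simplified stand-in for the selection step, not nova.virt.hardware's actual code:

```python
# Simplified stand-in for the selection logic, not nova.virt.hardware code:
# enumerate (sockets, cores, threads) factorizations of the vCPU count that
# respect the limits; for 1 vCPU the only candidate is 1:1:1.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        for cores in range(1, min(max_cores, vcpus) + 1):
            for threads in range(1, min(max_threads, vcpus) + 1):
                if sockets * cores * threads == vcpus:
                    yield (sockets, cores, threads)

print(list(possible_topologies(1)))   # [(1, 1, 1)]
```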
{{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 916.303647] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-268473fe-fd69-4aa7-a813-c41a964634a6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.320920] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:f9:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '07e9bef1-2b0e-4e4d-997f-de71bb0e213a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd71a7a39-b1d8-4236-9da5-ebc02ecd90b6', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 916.328442] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 916.330711] env[69992]: DEBUG oslo_vmware.rw_handles [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529082c1-695c-12ba-f017-77f14d8dd347/disk-0.vmdk. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 916.332372] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b403741b-b809-414f-8f09-a9fb93e38848 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.336263] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 916.340160] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f3c2ca-a320-4600-bb15-fa8fdac42fd6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.341276] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4ee3a2f-2870-4428-a120-25df2340ad3f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.356357] env[69992]: DEBUG oslo_vmware.rw_handles [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524c9200-e3ef-8aa6-83c8-26ad57a036dc/disk-0.vmdk is in state: ready. 
{{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 916.356533] env[69992]: ERROR oslo_vmware.rw_handles [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524c9200-e3ef-8aa6-83c8-26ad57a036dc/disk-0.vmdk due to incomplete transfer. [ 916.357497] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-3f7d5ff1-ed66-4b01-9ee8-038a4bfb542a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.370149] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:8b:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f762954-6ca5-4da5-bf0a-5d31c51ec570', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '59bdab57-cc8d-40d6-90b5-e7582a1f1500', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 916.378584] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 916.378883] env[69992]: DEBUG oslo_vmware.rw_handles [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529082c1-695c-12ba-f017-77f14d8dd347/disk-0.vmdk is in state: ready. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 916.379046] env[69992]: ERROR oslo_vmware.rw_handles [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529082c1-695c-12ba-f017-77f14d8dd347/disk-0.vmdk due to incomplete transfer. [ 916.380848] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 916.381072] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-99514823-e92b-406f-8cc4-f75f759b2d57 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.382869] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 916.382869] env[69992]: value = "task-2896776" [ 916.382869] env[69992]: _type = "Task" [ 916.382869] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.383126] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d126d906-d9b4-45fc-b7d9-befbfb833515 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.403421] env[69992]: DEBUG oslo_vmware.rw_handles [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524c9200-e3ef-8aa6-83c8-26ad57a036dc/disk-0.vmdk. {{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 916.403636] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Uploaded image 2b0409f0-6588-4643-b677-dfab84d8e552 to the Glance image server {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 916.405907] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Destroying the VM {{(pid=69992) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 916.406557] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ddab5737-abe5-41eb-9de0-a1c964fbbec7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.409397] env[69992]: DEBUG oslo_vmware.rw_handles [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529082c1-695c-12ba-f017-77f14d8dd347/disk-0.vmdk. {{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 916.409625] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Uploaded image 31ef2967-c992-4464-ba06-78513ddb83ff to the Glance image server {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 916.411045] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Destroying the VM {{(pid=69992) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 916.412599] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-22f3b26d-14a3-44a4-ac62-239918dac751 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.414122] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 916.414122] env[69992]: value = "task-2896777" [ 916.414122] env[69992]: _type = "Task" [ 916.414122] env[69992]: } to complete. 
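The ERROR lines above ("Aborting lease ... due to incomplete transfer") show the cleanup path the VMDK read handles take on close when the NFC lease is still in the "ready" state: the lease is aborted so vCenter releases the export, and the handle is then closed. A simplified model of that release decision, illustrative only and not oslo.vmware's implementation:

```python
# Simplified model of the close()-time decision, illustrative only and not
# oslo.vmware's implementation: a lease still in the "ready" state with an
# unfinished transfer gets HttpNfcLeaseAbort so vCenter releases the export.
from enum import Enum

class LeaseState(Enum):
    READY = 'ready'
    DONE = 'done'
    ERROR = 'error'

def release_action(state, transfer_complete):
    if state is LeaseState.READY:
        if transfer_complete:
            return 'HttpNfcLeaseComplete'
        return 'HttpNfcLeaseAbort'
    if state is LeaseState.ERROR:
        return 'raise'          # surface the lease error to the caller
    return 'noop'               # already done, nothing to release

assert release_action(LeaseState.READY, False) == 'HttpNfcLeaseAbort'
```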
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.417912] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896776, 'name': CreateVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.422179] env[69992]: DEBUG oslo_vmware.api [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Waiting for the task: (returnval){ [ 916.422179] env[69992]: value = "task-2896778" [ 916.422179] env[69992]: _type = "Task" [ 916.422179] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.427145] env[69992]: DEBUG oslo_vmware.api [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 916.427145] env[69992]: value = "task-2896779" [ 916.427145] env[69992]: _type = "Task" [ 916.427145] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.433218] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896777, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.436655] env[69992]: DEBUG oslo_vmware.api [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896778, 'name': Destroy_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.443446] env[69992]: DEBUG oslo_vmware.api [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896779, 'name': Destroy_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.913279] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896776, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.933740] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896777, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.939482] env[69992]: DEBUG oslo_vmware.api [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896778, 'name': Destroy_Task} progress is 33%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.947400] env[69992]: DEBUG oslo_vmware.api [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896779, 'name': Destroy_Task, 'duration_secs': 0.366092} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.947702] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Destroyed the VM [ 916.948040] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Deleting Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 916.948301] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a02bbf87-557e-4405-9ab4-21c14a00e160 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.959744] env[69992]: DEBUG oslo_vmware.api [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 916.959744] env[69992]: value = "task-2896780" [ 916.959744] env[69992]: _type = "Task" [ 916.959744] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.969479] env[69992]: DEBUG oslo_vmware.api [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896780, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.049971] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8de5498a-3322-4bf9-8724-4266bfcc14e2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.058519] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d6cdb0d-70a9-49b4-b18f-3f019060bc8b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.095141] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8ac442-28fd-481d-ae01-935c8a2cfab8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.104345] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0388ae9c-cbb4-42d2-a12b-e3f8f6a5faf6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.119260] env[69992]: DEBUG nova.compute.provider_tree [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.410365] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896776, 'name': CreateVM_Task, 'duration_secs': 0.550728} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.410470] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 917.411183] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.411347] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.411666] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 917.411915] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-646eeb32-37ab-4a3b-9126-4e2feff6544b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.419019] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 917.419019] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52c11d01-bc5c-4997-95d5-78fa9d3b4a85" [ 917.419019] env[69992]: _type = "Task" [ 917.419019] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.433491] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896777, 'name': CreateVM_Task, 'duration_secs': 0.528875} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.433704] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c11d01-bc5c-4997-95d5-78fa9d3b4a85, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.434323] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 917.435023] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.440627] env[69992]: DEBUG oslo_vmware.api [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896778, 'name': Destroy_Task, 'duration_secs': 0.572975} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.440627] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Destroyed the VM [ 917.440627] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Deleting Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 917.440627] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2181c05c-9153-452d-81c5-f21981ea7316 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.448366] env[69992]: DEBUG oslo_vmware.api [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Waiting for the task: (returnval){ [ 917.448366] env[69992]: value = "task-2896781" [ 917.448366] env[69992]: _type = "Task" [ 917.448366] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.457059] env[69992]: DEBUG oslo_vmware.api [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896781, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.469101] env[69992]: DEBUG oslo_vmware.api [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896780, 'name': RemoveSnapshot_Task, 'duration_secs': 0.3451} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.469466] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Deleted Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 917.469613] env[69992]: INFO nova.compute.manager [None req-4af9e9ab-c62b-4da9-a858-ca489247122f tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Took 12.78 seconds to snapshot the instance on the hypervisor. [ 917.584701] env[69992]: DEBUG nova.compute.manager [req-05690873-fc6d-447a-b42f-cf15bbfb9554 req-1df48f00-4953-4e5e-9675-4b1c58f49c33 service nova] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Received event network-vif-plugged-d71a7a39-b1d8-4236-9da5-ebc02ecd90b6 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 917.584979] env[69992]: DEBUG oslo_concurrency.lockutils [req-05690873-fc6d-447a-b42f-cf15bbfb9554 req-1df48f00-4953-4e5e-9675-4b1c58f49c33 service nova] Acquiring lock "a29534bf-ee12-4b94-839b-4a12659ebd3b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.585274] env[69992]: DEBUG oslo_concurrency.lockutils [req-05690873-fc6d-447a-b42f-cf15bbfb9554 req-1df48f00-4953-4e5e-9675-4b1c58f49c33 service nova] Lock "a29534bf-ee12-4b94-839b-4a12659ebd3b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.585671] env[69992]: DEBUG oslo_concurrency.lockutils [req-05690873-fc6d-447a-b42f-cf15bbfb9554 req-1df48f00-4953-4e5e-9675-4b1c58f49c33 service nova] Lock "a29534bf-ee12-4b94-839b-4a12659ebd3b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.585905] env[69992]: DEBUG nova.compute.manager [req-05690873-fc6d-447a-b42f-cf15bbfb9554 req-1df48f00-4953-4e5e-9675-4b1c58f49c33 service nova] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] No waiting events found dispatching network-vif-plugged-d71a7a39-b1d8-4236-9da5-ebc02ecd90b6 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 917.586151] env[69992]: WARNING nova.compute.manager [req-05690873-fc6d-447a-b42f-cf15bbfb9554 req-1df48f00-4953-4e5e-9675-4b1c58f49c33 service nova] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Received unexpected event network-vif-plugged-d71a7a39-b1d8-4236-9da5-ebc02ecd90b6 for instance with vm_state building and task_state spawning. 
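The event handling above ("No waiting events found dispatching network-vif-plugged-...", followed by the WARNING about an unexpected event while the instance is still building) reflects per-instance event bookkeeping: waiters are registered under (event name, tag) keys and popped when Neutron's notification arrives, and an event with no registered waiter is logged as unexpected. A much-simplified model of that bookkeeping; the real InstanceEvents in nova/compute/manager.py is eventlet-based, and the names and IDs below are placeholders:

```python
# Much-simplified model of per-instance external-event bookkeeping.
# Illustrative only: the real InstanceEvents lives in nova/compute/manager.py
# and is eventlet-based; names and IDs below are placeholders.
import threading
from collections import defaultdict

class InstanceEvents:
    def __init__(self):
        self._events = defaultdict(dict)   # uuid -> {(name, tag): Event}
        self._lock = threading.Lock()

    def prepare_for_event(self, instance_uuid, name, tag):
        """Register a waiter before the operation that expects the event."""
        ev = threading.Event()
        with self._lock:
            self._events[instance_uuid][(name, tag)] = ev
        return ev

    def pop_event(self, instance_uuid, name, tag):
        """Pop the waiter when the event arrives; None means 'unexpected'."""
        with self._lock:
            return self._events[instance_uuid].pop((name, tag), None)

registry = InstanceEvents()
# A network-vif-plugged event arrives before any waiter was registered:
waiter = registry.pop_event('instance-uuid', 'network-vif-plugged', 'port-id')
if waiter is None:
    print('Received unexpected event network-vif-plugged')   # WARNING path
else:
    waiter.set()   # wake the thread blocked on the prepared event
```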
[ 917.627176] env[69992]: DEBUG nova.scheduler.client.report [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 917.938826] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c11d01-bc5c-4997-95d5-78fa9d3b4a85, 'name': SearchDatastore_Task, 'duration_secs': 0.036947} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.939270] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.939542] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 917.939795] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.939970] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.941230] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 917.942396] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.945200] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 917.945200] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b830c98a-e559-4eb9-9be3-a8accf5a0027 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.946787] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9ad278c-ce8b-40d5-b952-a08bcef57adc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.959835] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for the task: (returnval){ [ 917.959835] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]524457da-7590-5acd-fc17-5baea4b7b595" [ 917.959835] env[69992]: _type = "Task" [ 917.959835] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.965381] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 917.965797] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 917.967407] env[69992]: DEBUG oslo_vmware.api [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896781, 'name': RemoveSnapshot_Task} progress is 43%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.973916] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef8f2b3a-185e-4aa9-ad78-b28560403314 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.998215] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 917.998215] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]526adcf4-4bdd-7fd5-78e5-631475adfe0a" [ 917.998215] env[69992]: _type = "Task" [ 917.998215] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.998896] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524457da-7590-5acd-fc17-5baea4b7b595, 'name': SearchDatastore_Task, 'duration_secs': 0.014466} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.000151] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.000649] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 918.001261] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.015454] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526adcf4-4bdd-7fd5-78e5-631475adfe0a, 'name': SearchDatastore_Task, 'duration_secs': 0.012604} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.018088] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fe55a34-b2c8-4a45-b320-b7325c69d63e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.024415] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 918.024415] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528921f4-59a0-76c8-ff35-f72821c44c4c" [ 918.024415] env[69992]: _type = "Task" [ 918.024415] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.034839] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528921f4-59a0-76c8-ff35-f72821c44c4c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.130412] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.614s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.134268] env[69992]: DEBUG nova.compute.manager [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 918.137728] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.794s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.139590] env[69992]: INFO nova.compute.claims [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 918.273305] env[69992]: DEBUG nova.compute.manager [req-c8196a6d-a1c8-4b8c-8cc4-8aac2052e602 req-faf0b9fe-9b8b-4a3c-baba-120d4a1954f2 service nova] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Received event network-vif-plugged-59bdab57-cc8d-40d6-90b5-e7582a1f1500 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 918.273305] env[69992]: DEBUG oslo_concurrency.lockutils [req-c8196a6d-a1c8-4b8c-8cc4-8aac2052e602 req-faf0b9fe-9b8b-4a3c-baba-120d4a1954f2 service nova] Acquiring lock "bf75484e-4020-48f7-9419-bd88d0462b90-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 918.273305] env[69992]: DEBUG oslo_concurrency.lockutils [req-c8196a6d-a1c8-4b8c-8cc4-8aac2052e602 req-faf0b9fe-9b8b-4a3c-baba-120d4a1954f2 service nova] Lock "bf75484e-4020-48f7-9419-bd88d0462b90-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.273305] env[69992]: DEBUG oslo_concurrency.lockutils [req-c8196a6d-a1c8-4b8c-8cc4-8aac2052e602 req-faf0b9fe-9b8b-4a3c-baba-120d4a1954f2 service nova] Lock "bf75484e-4020-48f7-9419-bd88d0462b90-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.273475] env[69992]: DEBUG nova.compute.manager [req-c8196a6d-a1c8-4b8c-8cc4-8aac2052e602 req-faf0b9fe-9b8b-4a3c-baba-120d4a1954f2 service nova] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] No waiting events found dispatching network-vif-plugged-59bdab57-cc8d-40d6-90b5-e7582a1f1500 {{(pid=69992) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 918.273512] env[69992]: WARNING nova.compute.manager [req-c8196a6d-a1c8-4b8c-8cc4-8aac2052e602 req-faf0b9fe-9b8b-4a3c-baba-120d4a1954f2 service nova] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Received unexpected event network-vif-plugged-59bdab57-cc8d-40d6-90b5-e7582a1f1500 for instance with vm_state building and task_state spawning. [ 918.461862] env[69992]: DEBUG oslo_vmware.api [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896781, 'name': RemoveSnapshot_Task, 'duration_secs': 0.845597} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.463420] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Deleted Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 918.463731] env[69992]: INFO nova.compute.manager [None req-cd54447b-7de9-4197-b8f2-cf9cb6af4b6a tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Took 15.59 seconds to snapshot the instance on the hypervisor. [ 918.537531] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528921f4-59a0-76c8-ff35-f72821c44c4c, 'name': SearchDatastore_Task, 'duration_secs': 0.011873} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.538114] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.538689] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] a29534bf-ee12-4b94-839b-4a12659ebd3b/a29534bf-ee12-4b94-839b-4a12659ebd3b.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 918.539149] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 918.539539] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 918.540524] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2738a77e-6b18-4288-a908-5aef3b74f95b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.543327] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-92c37239-f273-4bf6-9e55-936c2a7e892a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.553851] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 918.553851] env[69992]: value = "task-2896782" [ 918.553851] env[69992]: _type = "Task" [ 918.553851] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.563026] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 918.563026] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 918.563026] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2fb88ab-c4be-4482-95ab-5bc623d3f1ec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.568391] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896782, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.573234] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for the task: (returnval){ [ 918.573234] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52b72249-bea6-d17f-db8b-42774fc06e58" [ 918.573234] env[69992]: _type = "Task" [ 918.573234] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.583300] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b72249-bea6-d17f-db8b-42774fc06e58, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.645084] env[69992]: DEBUG nova.compute.utils [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 918.649569] env[69992]: DEBUG nova.compute.manager [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 918.649963] env[69992]: DEBUG nova.network.neutron [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 918.883175] env[69992]: DEBUG nova.policy [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5413fa13e7d146fc9ddff303ecb78b80', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f913fc9925114c7296516f0236b74732', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 918.917571] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "d361769c-bfc2-4c72-83f4-dc9b51f907a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 918.917977] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "d361769c-bfc2-4c72-83f4-dc9b51f907a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.948406] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 918.950367] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.974956] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49fddcf4-6d67-4994-b419-45fb00d63eb9 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "1d436762-964d-40d9-871e-ee33c3ba25b5" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
918.977416] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49fddcf4-6d67-4994-b419-45fb00d63eb9 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "1d436762-964d-40d9-871e-ee33c3ba25b5" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.977416] env[69992]: DEBUG nova.compute.manager [None req-49fddcf4-6d67-4994-b419-45fb00d63eb9 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Going to confirm migration 1 {{(pid=69992) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 918.984771] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 918.985297] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.069195] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896782, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.088230] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b72249-bea6-d17f-db8b-42774fc06e58, 'name': SearchDatastore_Task, 'duration_secs': 0.019155} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.090129] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b289953-8b73-475a-9949-034dc2208560 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.099317] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for the task: (returnval){ [ 919.099317] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52e26c0c-673a-a393-8107-4e918fdc8ce6" [ 919.099317] env[69992]: _type = "Task" [ 919.099317] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.111919] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e26c0c-673a-a393-8107-4e918fdc8ce6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.150926] env[69992]: DEBUG nova.compute.manager [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 919.566867] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896782, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.786448} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.567182] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] a29534bf-ee12-4b94-839b-4a12659ebd3b/a29534bf-ee12-4b94-839b-4a12659ebd3b.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 919.567435] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 919.570109] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eb188fc0-e371-452e-8a71-ad984e201f49 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.578093] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 919.578093] env[69992]: value = "task-2896783" [ 919.578093] env[69992]: _type = "Task" [ 919.578093] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.589668] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896783, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.614395] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e26c0c-673a-a393-8107-4e918fdc8ce6, 'name': SearchDatastore_Task, 'duration_secs': 0.057615} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.617192] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 919.617469] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] bf75484e-4020-48f7-9419-bd88d0462b90/bf75484e-4020-48f7-9419-bd88d0462b90.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 919.617953] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ec897a70-b787-453f-a2dd-ab9f856662b2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.625967] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for the task: (returnval){ [ 919.625967] env[69992]: value = "task-2896784" [ 919.625967] env[69992]: _type = "Task" [ 919.625967] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.638791] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896784, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.705937] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49fddcf4-6d67-4994-b419-45fb00d63eb9 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "refresh_cache-1d436762-964d-40d9-871e-ee33c3ba25b5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.705937] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49fddcf4-6d67-4994-b419-45fb00d63eb9 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquired lock "refresh_cache-1d436762-964d-40d9-871e-ee33c3ba25b5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 919.705937] env[69992]: DEBUG nova.network.neutron [None req-49fddcf4-6d67-4994-b419-45fb00d63eb9 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 919.705937] env[69992]: DEBUG nova.objects.instance [None req-49fddcf4-6d67-4994-b419-45fb00d63eb9 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lazy-loading 'info_cache' on Instance uuid 1d436762-964d-40d9-871e-ee33c3ba25b5 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 919.732320] env[69992]: DEBUG nova.network.neutron [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Successfully created port: a83e70ea-6bd6-4317-a2fd-5170e25fba56 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 919.738432] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51b49f86-e1d2-46da-a4bd-247bf0e8451a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.749969] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6528b9dc-22aa-4989-8daa-9d4e0fbf0f82 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.782405] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b2a353-57f1-4b7a-a557-78234c223c42 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.794022] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5048bb49-6bc7-4428-ab91-b16b44a1c5f3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.808327] env[69992]: DEBUG nova.compute.provider_tree [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.090662] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 
tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896783, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069948} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.090941] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 920.093703] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1adb06cf-a347-4681-aac8-62d0bc52e160 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.120959] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] a29534bf-ee12-4b94-839b-4a12659ebd3b/a29534bf-ee12-4b94-839b-4a12659ebd3b.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 920.121394] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-edefee6e-e438-4263-8008-6e9b7bcd7428 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.151021] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896784, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.154757] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 920.154757] env[69992]: value = "task-2896785" [ 920.154757] env[69992]: _type = "Task" [ 920.154757] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.164874] env[69992]: DEBUG nova.compute.manager [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 920.168296] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896785, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.190305] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Acquiring lock "fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.190697] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Lock "fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.202371] env[69992]: DEBUG nova.virt.hardware [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 920.202749] env[69992]: DEBUG nova.virt.hardware [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 920.202819] env[69992]: DEBUG nova.virt.hardware [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 920.202928] env[69992]: DEBUG nova.virt.hardware [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 920.203085] env[69992]: DEBUG nova.virt.hardware [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 920.203239] env[69992]: DEBUG nova.virt.hardware [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 920.203456] env[69992]: DEBUG nova.virt.hardware [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 920.203620] env[69992]: DEBUG nova.virt.hardware [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 920.203790] env[69992]: DEBUG nova.virt.hardware [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 920.204015] env[69992]: DEBUG nova.virt.hardware [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 920.204228] env[69992]: DEBUG nova.virt.hardware [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 920.206346] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a2a2ac-b607-4f8d-a4d8-61edb008b691 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.223021] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a39ac06-8958-45f6-8874-261abf61a190 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.312316] env[69992]: DEBUG nova.scheduler.client.report [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 920.650527] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896784, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.809615} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.651067] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] bf75484e-4020-48f7-9419-bd88d0462b90/bf75484e-4020-48f7-9419-bd88d0462b90.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 920.651478] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 920.654019] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64614eb3-44b9-4c05-af98-a044df970bde {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.660702] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for the task: (returnval){ [ 920.660702] env[69992]: value = "task-2896786" [ 920.660702] env[69992]: _type = "Task" [ 920.660702] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.664480] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896785, 'name': ReconfigVM_Task, 'duration_secs': 0.480878} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.668534] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Reconfigured VM instance instance-00000016 to attach disk [datastore2] a29534bf-ee12-4b94-839b-4a12659ebd3b/a29534bf-ee12-4b94-839b-4a12659ebd3b.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 920.669531] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5dac878d-727d-4c21-ae9a-78446914315d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.677439] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896786, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.679550] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 920.679550] env[69992]: value = "task-2896787" [ 920.679550] env[69992]: _type = "Task" [ 920.679550] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.689332] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896787, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.823025] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.684s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.823025] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.945s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.827476] env[69992]: INFO nova.compute.claims [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 920.864680] env[69992]: DEBUG nova.compute.manager [req-f316841f-4275-441b-8426-876700a7a2e2 req-4d27f823-1a36-453c-a8f8-730017f8c94b service nova] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Received event network-changed-d71a7a39-b1d8-4236-9da5-ebc02ecd90b6 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 920.864680] env[69992]: DEBUG nova.compute.manager [req-f316841f-4275-441b-8426-876700a7a2e2 req-4d27f823-1a36-453c-a8f8-730017f8c94b service nova] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Refreshing instance network info cache due to event network-changed-d71a7a39-b1d8-4236-9da5-ebc02ecd90b6. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 920.864787] env[69992]: DEBUG oslo_concurrency.lockutils [req-f316841f-4275-441b-8426-876700a7a2e2 req-4d27f823-1a36-453c-a8f8-730017f8c94b service nova] Acquiring lock "refresh_cache-a29534bf-ee12-4b94-839b-4a12659ebd3b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.864931] env[69992]: DEBUG oslo_concurrency.lockutils [req-f316841f-4275-441b-8426-876700a7a2e2 req-4d27f823-1a36-453c-a8f8-730017f8c94b service nova] Acquired lock "refresh_cache-a29534bf-ee12-4b94-839b-4a12659ebd3b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 920.865076] env[69992]: DEBUG nova.network.neutron [req-f316841f-4275-441b-8426-876700a7a2e2 req-4d27f823-1a36-453c-a8f8-730017f8c94b service nova] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Refreshing network info cache for port d71a7a39-b1d8-4236-9da5-ebc02ecd90b6 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 921.068966] env[69992]: DEBUG nova.network.neutron [None req-49fddcf4-6d67-4994-b419-45fb00d63eb9 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Updating instance_info_cache with network_info: [{"id": "2584dc71-913f-4c9b-922c-f8b28530b82f", "address": "fa:16:3e:42:68:21", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.191", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2584dc71-91", "ovs_interfaceid": "2584dc71-913f-4c9b-922c-f8b28530b82f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.174722] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896786, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.132182} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.175125] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 921.175949] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bdfbe9c-b8bc-4078-abfc-c6eba84a41bb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.909519] env[69992]: DEBUG nova.network.neutron [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Successfully updated port: a83e70ea-6bd6-4317-a2fd-5170e25fba56 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 921.911109] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Acquiring lock "9d34d8d9-4f9d-4417-90e0-f38d460bfe63" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.911337] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Lock "9d34d8d9-4f9d-4417-90e0-f38d460bfe63" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.915843] env[69992]: INFO nova.compute.manager [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Rebuilding instance [ 921.917661] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49fddcf4-6d67-4994-b419-45fb00d63eb9 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Releasing lock "refresh_cache-1d436762-964d-40d9-871e-ee33c3ba25b5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 921.917904] env[69992]: DEBUG nova.objects.instance [None req-49fddcf4-6d67-4994-b419-45fb00d63eb9 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lazy-loading 'migration_context' on Instance uuid 1d436762-964d-40d9-871e-ee33c3ba25b5 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 921.931111] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Reconfiguring VM instance instance-00000017 to attach disk [datastore2] bf75484e-4020-48f7-9419-bd88d0462b90/bf75484e-4020-48f7-9419-bd88d0462b90.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 921.932365] env[69992]: DEBUG nova.compute.manager [req-97d55d2d-c3b0-48c2-8a6b-78911f4909c0 req-1776c9df-ffde-4de6-937c-366db96dcd57 service nova] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Received event network-changed-59bdab57-cc8d-40d6-90b5-e7582a1f1500 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 921.932579] env[69992]: DEBUG nova.compute.manager [req-97d55d2d-c3b0-48c2-8a6b-78911f4909c0 req-1776c9df-ffde-4de6-937c-366db96dcd57 service nova] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Refreshing instance network info cache due to event network-changed-59bdab57-cc8d-40d6-90b5-e7582a1f1500. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 921.932737] env[69992]: DEBUG oslo_concurrency.lockutils [req-97d55d2d-c3b0-48c2-8a6b-78911f4909c0 req-1776c9df-ffde-4de6-937c-366db96dcd57 service nova] Acquiring lock "refresh_cache-bf75484e-4020-48f7-9419-bd88d0462b90" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.932876] env[69992]: DEBUG oslo_concurrency.lockutils [req-97d55d2d-c3b0-48c2-8a6b-78911f4909c0 req-1776c9df-ffde-4de6-937c-366db96dcd57 service nova] Acquired lock "refresh_cache-bf75484e-4020-48f7-9419-bd88d0462b90" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.933055] env[69992]: DEBUG nova.network.neutron [req-97d55d2d-c3b0-48c2-8a6b-78911f4909c0 req-1776c9df-ffde-4de6-937c-366db96dcd57 service nova] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Refreshing network info cache for port 59bdab57-cc8d-40d6-90b5-e7582a1f1500 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 921.937938] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-daf8edef-4234-46d5-bad8-1685957c5aa5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.960416] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896787, 'name': Rename_Task, 'duration_secs': 0.156796} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.965859] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 921.966294] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for the task: (returnval){ [ 921.966294] env[69992]: value = "task-2896791" [ 921.966294] env[69992]: _type = "Task" [ 921.966294] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.967033] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8d1d0d4f-5de4-454b-9e97-6db88420a3b7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.980626] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896791, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.983101] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 921.983101] env[69992]: value = "task-2896792" [ 921.983101] env[69992]: _type = "Task" [ 921.983101] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.999242] env[69992]: DEBUG nova.compute.manager [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 922.003303] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b74a38bb-036f-4e8f-8ccd-c6b4066c57db {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.006598] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896792, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.138468] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Acquiring lock "30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.138725] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Lock "30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.436138] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Lock "9d34d8d9-4f9d-4417-90e0-f38d460bfe63" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.524s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.439025] env[69992]: DEBUG nova.compute.manager [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 922.447646] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Acquiring lock "refresh_cache-1f9d0558-63fb-4a6f-a2d2-dd7a334249a2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.448213] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Acquired lock "refresh_cache-1f9d0558-63fb-4a6f-a2d2-dd7a334249a2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.448611] env[69992]: DEBUG nova.network.neutron [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 922.475371] env[69992]: DEBUG nova.objects.base [None req-49fddcf4-6d67-4994-b419-45fb00d63eb9 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Object Instance<1d436762-964d-40d9-871e-ee33c3ba25b5> lazy-loaded attributes: info_cache,migration_context {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 922.477449] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-053b45f6-219d-41ac-8862-70e20a90dc80 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.487195] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896791, 'name': ReconfigVM_Task, 'duration_secs': 0.382286} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.487195] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Reconfigured VM instance instance-00000017 to attach disk [datastore2] bf75484e-4020-48f7-9419-bd88d0462b90/bf75484e-4020-48f7-9419-bd88d0462b90.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 922.491474] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ec0cdd27-7cbd-4669-ba27-a442f0a2a662 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.511685] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f6c7cda-178b-4466-b55e-ca8d8f53762f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.522408] env[69992]: DEBUG oslo_vmware.api [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896792, 'name': PowerOnVM_Task, 'duration_secs': 0.500911} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.524669] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 922.525034] env[69992]: INFO nova.compute.manager [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Took 12.43 seconds to spawn the instance on the hypervisor. [ 922.525340] env[69992]: DEBUG nova.compute.manager [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 922.525794] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for the task: (returnval){ [ 922.525794] env[69992]: value = "task-2896793" [ 922.525794] env[69992]: _type = "Task" [ 922.525794] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.529299] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12351135-024f-4838-a85c-61904ca91bc2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.537439] env[69992]: DEBUG oslo_vmware.api [None req-49fddcf4-6d67-4994-b419-45fb00d63eb9 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 922.537439] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52678950-23bb-7cad-af4c-703c8cf62bf3" [ 922.537439] env[69992]: _type = "Task" [ 922.537439] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.547000] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896793, 'name': Rename_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.559513] env[69992]: DEBUG oslo_vmware.api [None req-49fddcf4-6d67-4994-b419-45fb00d63eb9 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52678950-23bb-7cad-af4c-703c8cf62bf3, 'name': SearchDatastore_Task, 'duration_secs': 0.013794} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.559789] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49fddcf4-6d67-4994-b419-45fb00d63eb9 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.602430] env[69992]: DEBUG nova.network.neutron [req-f316841f-4275-441b-8426-876700a7a2e2 req-4d27f823-1a36-453c-a8f8-730017f8c94b service nova] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Updated VIF entry in instance network info cache for port d71a7a39-b1d8-4236-9da5-ebc02ecd90b6. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 922.602430] env[69992]: DEBUG nova.network.neutron [req-f316841f-4275-441b-8426-876700a7a2e2 req-4d27f823-1a36-453c-a8f8-730017f8c94b service nova] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Updating instance_info_cache with network_info: [{"id": "d71a7a39-b1d8-4236-9da5-ebc02ecd90b6", "address": "fa:16:3e:50:f9:26", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd71a7a39-b1", "ovs_interfaceid": "d71a7a39-b1d8-4236-9da5-ebc02ecd90b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.603431] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "9bab6bf7-43c8-4cc3-b484-4472f1acdf45" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.603431] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "9bab6bf7-43c8-4cc3-b484-4472f1acdf45" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.603431] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "9bab6bf7-43c8-4cc3-b484-4472f1acdf45-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.603431] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "9bab6bf7-43c8-4cc3-b484-4472f1acdf45-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.003s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.603925] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock 
"9bab6bf7-43c8-4cc3-b484-4472f1acdf45-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.607074] env[69992]: INFO nova.compute.manager [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Terminating instance [ 922.752377] env[69992]: DEBUG nova.network.neutron [req-97d55d2d-c3b0-48c2-8a6b-78911f4909c0 req-1776c9df-ffde-4de6-937c-366db96dcd57 service nova] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Updated VIF entry in instance network info cache for port 59bdab57-cc8d-40d6-90b5-e7582a1f1500. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 922.752742] env[69992]: DEBUG nova.network.neutron [req-97d55d2d-c3b0-48c2-8a6b-78911f4909c0 req-1776c9df-ffde-4de6-937c-366db96dcd57 service nova] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Updating instance_info_cache with network_info: [{"id": "59bdab57-cc8d-40d6-90b5-e7582a1f1500", "address": "fa:16:3e:e6:8b:79", "network": {"id": "3e77044c-b2d9-4469-8bae-4dbd1f752c9c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-482235377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da546e986828460e958e2eed165bf47e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59bdab57-cc", "ovs_interfaceid": "59bdab57-cc8d-40d6-90b5-e7582a1f1500", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.954247] env[69992]: DEBUG nova.compute.utils [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 922.962021] env[69992]: DEBUG nova.compute.manager [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 922.962021] env[69992]: DEBUG nova.network.neutron [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 923.004105] env[69992]: DEBUG nova.network.neutron [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 923.030293] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 923.030367] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7f4c364e-7cac-483b-9cb8-da4f35234fc9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.035151] env[69992]: DEBUG nova.policy [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6da1f202e65e4c7481072936fa575200', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1606051f4de9477381d3b1bcea697f5d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 923.047701] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896793, 'name': Rename_Task, 'duration_secs': 0.20229} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.049411] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 923.049445] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 923.049445] env[69992]: value = "task-2896794" [ 923.049445] env[69992]: _type = "Task" [ 923.049445] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.055249] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3c959f9f-757b-4177-9abc-2015ffe630e3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.079848] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896794, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.083722] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for the task: (returnval){ [ 923.083722] env[69992]: value = "task-2896795" [ 923.083722] env[69992]: _type = "Task" [ 923.083722] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.083722] env[69992]: INFO nova.compute.manager [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Took 41.36 seconds to build instance. [ 923.098153] env[69992]: DEBUG oslo_concurrency.lockutils [req-f316841f-4275-441b-8426-876700a7a2e2 req-4d27f823-1a36-453c-a8f8-730017f8c94b service nova] Releasing lock "refresh_cache-a29534bf-ee12-4b94-839b-4a12659ebd3b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 923.098655] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896795, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.110862] env[69992]: DEBUG nova.compute.manager [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 923.111390] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 923.112549] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d829d4c4-94e7-458d-b74a-a0bc7f97ba8d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.121102] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae3e5a66-86f7-4967-9e40-770e39c5dc6a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.138805] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af248000-5fe9-4e04-a7cb-eb0547d3ebe5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.142934] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 923.144861] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf50efea-94c7-44c4-9881-3617baaf0d04 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.185400] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736f7539-cd59-4db8-ba21-ceee28c854a7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.197540] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ccfd92a-9d36-4b2d-aaa5-62fadd94e4f7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.216062] env[69992]: DEBUG nova.compute.provider_tree [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 923.234193] env[69992]: DEBUG nova.network.neutron [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Updating instance_info_cache with network_info: [{"id": "a83e70ea-6bd6-4317-a2fd-5170e25fba56", "address": "fa:16:3e:f5:a3:fb", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.100", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa83e70ea-6b", "ovs_interfaceid": "a83e70ea-6bd6-4317-a2fd-5170e25fba56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.239021] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 923.239021] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 923.239021] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Deleting the datastore file [datastore1] 9bab6bf7-43c8-4cc3-b484-4472f1acdf45 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 923.239021] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b58d9db-1eef-4a70-9bcd-7c9f401aacce {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.247046] env[69992]: DEBUG oslo_vmware.api [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 923.247046] env[69992]: value = "task-2896797" [ 923.247046] env[69992]: _type = "Task" [ 923.247046] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.258670] env[69992]: DEBUG oslo_concurrency.lockutils [req-97d55d2d-c3b0-48c2-8a6b-78911f4909c0 req-1776c9df-ffde-4de6-937c-366db96dcd57 service nova] Releasing lock "refresh_cache-bf75484e-4020-48f7-9419-bd88d0462b90" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 923.259400] env[69992]: DEBUG oslo_vmware.api [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896797, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.457202] env[69992]: DEBUG nova.compute.manager [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 923.574947] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896794, 'name': PowerOffVM_Task, 'duration_secs': 0.317261} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.575376] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 923.575642] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 923.576544] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e1cb599-37de-41bf-bca0-c4801addfd54 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.587472] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 923.591428] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-06edee1b-c9a1-48ef-9746-0c27a5915387 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.593860] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bd54e6dc-748d-4749-b742-22150e9ec356 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lock "a29534bf-ee12-4b94-839b-4a12659ebd3b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.649s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.603278] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896795, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.713737] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 923.714036] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 923.714158] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Deleting the datastore file [datastore2] 00b2fd0b-7841-448d-82cf-436aa8d80cda {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 923.714441] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d5cbb8d-5380-4a7e-9938-45e14ffbd51a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.719701] env[69992]: DEBUG nova.scheduler.client.report [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 923.724405] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 923.724405] env[69992]: value = "task-2896799" [ 923.724405] env[69992]: _type = "Task" [ 923.724405] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.734440] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896799, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.735341] env[69992]: DEBUG nova.network.neutron [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Successfully created port: 13280c7f-380d-4f20-b42f-532775cfc598 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 923.738650] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Releasing lock "refresh_cache-1f9d0558-63fb-4a6f-a2d2-dd7a334249a2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 923.738986] env[69992]: DEBUG nova.compute.manager [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Instance network_info: |[{"id": "a83e70ea-6bd6-4317-a2fd-5170e25fba56", "address": "fa:16:3e:f5:a3:fb", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.100", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa83e70ea-6b", "ovs_interfaceid": "a83e70ea-6bd6-4317-a2fd-5170e25fba56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 923.739746] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:a3:fb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '07e9bef1-2b0e-4e4d-997f-de71bb0e213a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a83e70ea-6bd6-4317-a2fd-5170e25fba56', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 923.748108] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Creating folder: Project (f913fc9925114c7296516f0236b74732). Parent ref: group-v581821. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 923.748860] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4a9803f-7dcc-495c-a008-c082dd6c0f34 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.763532] env[69992]: DEBUG oslo_vmware.api [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2896797, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.196406} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.763532] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 923.763532] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 923.763532] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 923.763532] env[69992]: INFO nova.compute.manager [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Took 0.65 seconds to destroy the instance on the hypervisor. [ 923.763958] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 923.764826] env[69992]: DEBUG nova.compute.manager [-] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 923.764937] env[69992]: DEBUG nova.network.neutron [-] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 923.770675] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Created folder: Project (f913fc9925114c7296516f0236b74732) in parent group-v581821. [ 923.770879] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Creating folder: Instances. Parent ref: group-v581898. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 923.772169] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f777fe93-59fb-4e9b-85a2-b9bf2b48bf40 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.788551] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Created folder: Instances in parent group-v581898. [ 923.788551] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 923.788551] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 923.788551] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91ba92a5-3c6a-4115-b335-8eb4946d2ea8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.814529] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 923.814529] env[69992]: value = "task-2896803" [ 923.814529] env[69992]: _type = "Task" [ 923.814529] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.823339] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896803, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.912905] env[69992]: DEBUG oslo_concurrency.lockutils [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "62936d27-5405-4d29-b3ff-c4d8a74ba440" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.913086] env[69992]: DEBUG oslo_concurrency.lockutils [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "62936d27-5405-4d29-b3ff-c4d8a74ba440" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.097603] env[69992]: DEBUG oslo_vmware.api [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896795, 'name': PowerOnVM_Task, 'duration_secs': 0.68563} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.098192] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 924.098818] env[69992]: INFO nova.compute.manager [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Took 11.19 seconds to spawn the instance on the hypervisor. [ 924.098818] env[69992]: DEBUG nova.compute.manager [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 924.099543] env[69992]: DEBUG nova.compute.manager [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 924.102902] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e4687df-f038-4cea-aa00-8cd8a63212cf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.226512] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.404s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.227119] env[69992]: DEBUG nova.compute.manager [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 924.231790] env[69992]: DEBUG oslo_concurrency.lockutils [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.654s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.232152] env[69992]: INFO nova.compute.claims [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 924.247932] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896799, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199188} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.248447] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 924.248699] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 924.248917] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 924.327372] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896803, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.472620] env[69992]: DEBUG nova.compute.manager [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 924.480055] env[69992]: DEBUG oslo_concurrency.lockutils [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "2b1a0943-d59a-441d-a2e6-8149106803b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.480406] env[69992]: DEBUG oslo_concurrency.lockutils [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "2b1a0943-d59a-441d-a2e6-8149106803b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.493700] env[69992]: DEBUG nova.compute.manager [req-c5a488ec-e0a9-44c0-9eeb-a83d5fd10561 req-35fc2c16-ec57-4ce6-990b-9c4b198be737 service nova] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Received event network-vif-plugged-a83e70ea-6bd6-4317-a2fd-5170e25fba56 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 924.493700] env[69992]: DEBUG oslo_concurrency.lockutils [req-c5a488ec-e0a9-44c0-9eeb-a83d5fd10561 req-35fc2c16-ec57-4ce6-990b-9c4b198be737 service nova] Acquiring lock "1f9d0558-63fb-4a6f-a2d2-dd7a334249a2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.493700] env[69992]: DEBUG oslo_concurrency.lockutils [req-c5a488ec-e0a9-44c0-9eeb-a83d5fd10561 req-35fc2c16-ec57-4ce6-990b-9c4b198be737 service nova] Lock "1f9d0558-63fb-4a6f-a2d2-dd7a334249a2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.493700] env[69992]: DEBUG oslo_concurrency.lockutils [req-c5a488ec-e0a9-44c0-9eeb-a83d5fd10561 req-35fc2c16-ec57-4ce6-990b-9c4b198be737 service nova] Lock "1f9d0558-63fb-4a6f-a2d2-dd7a334249a2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.493700] env[69992]: DEBUG nova.compute.manager [req-c5a488ec-e0a9-44c0-9eeb-a83d5fd10561 req-35fc2c16-ec57-4ce6-990b-9c4b198be737 service nova] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] No waiting events found dispatching network-vif-plugged-a83e70ea-6bd6-4317-a2fd-5170e25fba56 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 924.494079] env[69992]: WARNING nova.compute.manager [req-c5a488ec-e0a9-44c0-9eeb-a83d5fd10561 req-35fc2c16-ec57-4ce6-990b-9c4b198be737 service nova] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Received unexpected event network-vif-plugged-a83e70ea-6bd6-4317-a2fd-5170e25fba56 for instance with vm_state building and task_state spawning. 
[ 924.494079] env[69992]: DEBUG nova.compute.manager [req-c5a488ec-e0a9-44c0-9eeb-a83d5fd10561 req-35fc2c16-ec57-4ce6-990b-9c4b198be737 service nova] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Received event network-changed-a83e70ea-6bd6-4317-a2fd-5170e25fba56 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 924.494217] env[69992]: DEBUG nova.compute.manager [req-c5a488ec-e0a9-44c0-9eeb-a83d5fd10561 req-35fc2c16-ec57-4ce6-990b-9c4b198be737 service nova] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Refreshing instance network info cache due to event network-changed-a83e70ea-6bd6-4317-a2fd-5170e25fba56. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 924.494419] env[69992]: DEBUG oslo_concurrency.lockutils [req-c5a488ec-e0a9-44c0-9eeb-a83d5fd10561 req-35fc2c16-ec57-4ce6-990b-9c4b198be737 service nova] Acquiring lock "refresh_cache-1f9d0558-63fb-4a6f-a2d2-dd7a334249a2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.494532] env[69992]: DEBUG oslo_concurrency.lockutils [req-c5a488ec-e0a9-44c0-9eeb-a83d5fd10561 req-35fc2c16-ec57-4ce6-990b-9c4b198be737 service nova] Acquired lock "refresh_cache-1f9d0558-63fb-4a6f-a2d2-dd7a334249a2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.495741] env[69992]: DEBUG nova.network.neutron [req-c5a488ec-e0a9-44c0-9eeb-a83d5fd10561 req-35fc2c16-ec57-4ce6-990b-9c4b198be737 service nova] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Refreshing network info cache for port a83e70ea-6bd6-4317-a2fd-5170e25fba56 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 924.510087] env[69992]: DEBUG nova.virt.hardware [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 924.510728] env[69992]: DEBUG nova.virt.hardware [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 924.511083] env[69992]: DEBUG nova.virt.hardware [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 924.513422] env[69992]: DEBUG nova.virt.hardware [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 
tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 924.513637] env[69992]: DEBUG nova.virt.hardware [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 924.513823] env[69992]: DEBUG nova.virt.hardware [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 924.514065] env[69992]: DEBUG nova.virt.hardware [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 924.514278] env[69992]: DEBUG nova.virt.hardware [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 924.514401] env[69992]: DEBUG nova.virt.hardware [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 924.514570] env[69992]: DEBUG nova.virt.hardware [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 924.514745] env[69992]: DEBUG nova.virt.hardware [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 924.516726] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69cde528-0334-4730-a742-51647e019d8f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.526213] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd7a8757-e4f5-475f-8ce5-d31d97cbc7b5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.585630] env[69992]: DEBUG nova.network.neutron [-] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.628568] env[69992]: INFO nova.compute.manager [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 
tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Took 39.55 seconds to build instance. [ 924.630225] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.745140] env[69992]: DEBUG nova.compute.utils [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 924.746972] env[69992]: DEBUG nova.compute.manager [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 924.746972] env[69992]: DEBUG nova.network.neutron [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 924.826018] env[69992]: DEBUG nova.policy [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f3c3b862089a44a5a0aa4df1353e6630', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '034ac686ad0d438cbe7e56c546f87505', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 924.845972] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896803, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.862816] env[69992]: DEBUG oslo_concurrency.lockutils [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Acquiring lock "1d5722e1-5a48-4212-bbc7-527a3739db6e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.863092] env[69992]: DEBUG oslo_concurrency.lockutils [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Lock "1d5722e1-5a48-4212-bbc7-527a3739db6e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.863314] env[69992]: DEBUG oslo_concurrency.lockutils [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Acquiring lock "1d5722e1-5a48-4212-bbc7-527a3739db6e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.863509] env[69992]: DEBUG oslo_concurrency.lockutils [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Lock "1d5722e1-5a48-4212-bbc7-527a3739db6e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.863696] env[69992]: DEBUG oslo_concurrency.lockutils [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Lock "1d5722e1-5a48-4212-bbc7-527a3739db6e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.870020] env[69992]: INFO nova.compute.manager [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Terminating instance [ 924.959909] env[69992]: DEBUG nova.compute.manager [req-c429af1a-bde4-4fa8-9473-22d91993e56e req-ebc17aca-b384-4e85-9c6f-6b85fee1bdf8 service nova] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Received event network-vif-deleted-73053414-72bf-473f-8a22-4e100e8ced17 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 925.090306] env[69992]: INFO nova.compute.manager [-] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Took 1.33 seconds to deallocate network for instance. 
[ 925.131389] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b0355740-bf8c-4c40-a37d-38bca170b95e tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "bf75484e-4020-48f7-9419-bd88d0462b90" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.885s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.247329] env[69992]: DEBUG nova.compute.manager [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 925.291098] env[69992]: DEBUG nova.virt.hardware [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 925.291385] env[69992]: DEBUG nova.virt.hardware [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 925.291513] env[69992]: DEBUG nova.virt.hardware [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 925.291721] env[69992]: DEBUG nova.virt.hardware [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 925.291827] env[69992]: DEBUG nova.virt.hardware [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 925.291969] env[69992]: DEBUG nova.virt.hardware [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 925.292196] env[69992]: DEBUG nova.virt.hardware [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 925.292354] env[69992]: DEBUG nova.virt.hardware [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 925.292522] env[69992]: DEBUG nova.virt.hardware [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 925.292684] env[69992]: DEBUG nova.virt.hardware [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 925.292857] env[69992]: DEBUG nova.virt.hardware [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 925.293809] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb3b46d0-117e-4d0e-bc0f-6b462f3d38dd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.307660] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-834cdfc3-c32f-4f98-9877-ac7d8ab74a4b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.328793] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:de:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '369bb007-f861-4b94-a5ac-dd9d835b0fb2', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 925.337624] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 925.347053] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 925.348619] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d08f57cb-f57a-4fc0-802a-6530fbaeaaec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.372775] env[69992]: DEBUG nova.compute.manager [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 925.373085] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 925.380329] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ce8f4e-c7be-4416-b684-0a5b07091c07 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.383696] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896803, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.387020] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 925.387020] env[69992]: value = "task-2896804" [ 925.387020] env[69992]: _type = "Task" [ 925.387020] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.390671] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 925.391345] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7fbb00d3-df44-4898-8a5e-8dcb396c057c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.399550] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896804, 'name': CreateVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.413026] env[69992]: DEBUG oslo_vmware.api [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Waiting for the task: (returnval){ [ 925.413026] env[69992]: value = "task-2896805" [ 925.413026] env[69992]: _type = "Task" [ 925.413026] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.429406] env[69992]: DEBUG oslo_vmware.api [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896805, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.429406] env[69992]: DEBUG nova.network.neutron [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Successfully created port: be91de4c-766f-4a66-b07b-2dd3cbe88350 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 925.601182] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 925.611169] env[69992]: DEBUG nova.network.neutron [req-c5a488ec-e0a9-44c0-9eeb-a83d5fd10561 req-35fc2c16-ec57-4ce6-990b-9c4b198be737 service nova] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Updated VIF entry in instance network info cache for port a83e70ea-6bd6-4317-a2fd-5170e25fba56. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 925.611741] env[69992]: DEBUG nova.network.neutron [req-c5a488ec-e0a9-44c0-9eeb-a83d5fd10561 req-35fc2c16-ec57-4ce6-990b-9c4b198be737 service nova] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Updating instance_info_cache with network_info: [{"id": "a83e70ea-6bd6-4317-a2fd-5170e25fba56", "address": "fa:16:3e:f5:a3:fb", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.100", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa83e70ea-6b", "ovs_interfaceid": "a83e70ea-6bd6-4317-a2fd-5170e25fba56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.638537] env[69992]: DEBUG nova.compute.manager [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 925.841697] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896803, 'name': CreateVM_Task, 'duration_secs': 1.572899} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.841872] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 925.842784] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.842784] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.843565] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 925.847427] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-052c9b5d-e3e1-4fc3-ad2e-1afec78fd8d4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.850404] env[69992]: DEBUG nova.network.neutron [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Successfully updated port: 13280c7f-380d-4f20-b42f-532775cfc598 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 925.856894] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Waiting for the task: (returnval){ [ 925.856894] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52c5397f-d8f4-f2f4-bb02-92d6a702ee66" [ 925.856894] env[69992]: _type = "Task" [ 925.856894] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.875528] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c5397f-d8f4-f2f4-bb02-92d6a702ee66, 'name': SearchDatastore_Task, 'duration_secs': 0.014321} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.876030] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 925.876294] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 925.877037] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.877037] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.877165] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 925.877484] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fde58221-01a1-4aaa-b7ee-eb111c8ea88b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.891612] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 925.891843] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 925.893094] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c2959ce-b365-4600-8792-89e3e90d3014 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.902159] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896804, 'name': CreateVM_Task, 'duration_secs': 0.367945} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.902649] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 925.903331] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.903493] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.903788] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 925.904046] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5f2c52e-e75c-40ed-8b70-36843dcf54be {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.907422] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Waiting for the task: (returnval){ [ 925.907422] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a38a50-30ac-9275-e7d0-7e8c5750b474" [ 925.907422] env[69992]: _type = "Task" [ 925.907422] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.913393] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 925.913393] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]529ad032-823a-4a3e-57ab-9a5fd2e44554" [ 925.913393] env[69992]: _type = "Task" [ 925.913393] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.925720] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a38a50-30ac-9275-e7d0-7e8c5750b474, 'name': SearchDatastore_Task, 'duration_secs': 0.010997} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.928145] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03945823-3538-4718-9132-ba4b716ccc8f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.930731] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b2a7b5-08be-4797-9913-c15710f69b64 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.939325] env[69992]: DEBUG oslo_vmware.api [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896805, 'name': PowerOffVM_Task, 'duration_secs': 0.263412} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.939554] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]529ad032-823a-4a3e-57ab-9a5fd2e44554, 'name': SearchDatastore_Task, 'duration_secs': 0.009724} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.940655] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 925.940873] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 925.941226] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 925.941451] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 925.941653] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.942284] env[69992]: DEBUG oslo_vmware.service [-] 
Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc7f1381-da92-4505-a6c6-eea95b20cd51 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.946658] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Waiting for the task: (returnval){ [ 925.946658] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]520dddef-a80a-48af-6147-69bde78145bc" [ 925.946658] env[69992]: _type = "Task" [ 925.946658] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.948263] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99bbfa69-b9c1-480f-822c-de1beec8f63c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.960581] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]520dddef-a80a-48af-6147-69bde78145bc, 'name': SearchDatastore_Task, 'duration_secs': 0.010105} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.988802] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 925.989212] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2/1f9d0558-63fb-4a6f-a2d2-dd7a334249a2.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 925.990211] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.990419] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 925.990644] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-812f5947-d8cd-48f9-a501-b634695bcef8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.993378] env[69992]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c5bf84a-7820-4975-b5c6-caff6a05f4cf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.996416] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c26f634-f579-4b6b-92c2-c880c7c3ac24 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.006496] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b02877b-4f27-4b04-900e-e59ff3265e3b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.011141] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Waiting for the task: (returnval){ [ 926.011141] env[69992]: value = "task-2896808" [ 926.011141] env[69992]: _type = "Task" [ 926.011141] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.012936] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 926.013567] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 926.017311] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b1f058e-02d6-47fd-af1e-8b07d1c9e9a2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.029105] env[69992]: DEBUG nova.compute.provider_tree [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 926.030676] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 926.030890] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 926.031099] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Deleting the datastore file [datastore2] 1d5722e1-5a48-4212-bbc7-527a3739db6e {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 926.032425] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5219ff97-72c4-4fef-8064-ddc23de478d7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.039086] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Task: {'id': task-2896808, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.039265] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 926.039265] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5243f4ee-7f74-6e6f-8480-ba5cc3eec706" [ 926.039265] env[69992]: _type = "Task" [ 926.039265] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.046084] env[69992]: DEBUG oslo_vmware.api [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Waiting for the task: (returnval){ [ 926.046084] env[69992]: value = "task-2896809" [ 926.046084] env[69992]: _type = "Task" [ 926.046084] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.050467] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5243f4ee-7f74-6e6f-8480-ba5cc3eec706, 'name': SearchDatastore_Task, 'duration_secs': 0.010533} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.054304] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12001ef1-cd7f-4e6a-862f-6a6a3be67040 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.062475] env[69992]: DEBUG oslo_vmware.api [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896809, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.063782] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 926.063782] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52b9b288-c850-3b1e-d361-c02961ef7765" [ 926.063782] env[69992]: _type = "Task" [ 926.063782] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.073320] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b9b288-c850-3b1e-d361-c02961ef7765, 'name': SearchDatastore_Task, 'duration_secs': 0.010115} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.073580] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.073863] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 00b2fd0b-7841-448d-82cf-436aa8d80cda/00b2fd0b-7841-448d-82cf-436aa8d80cda.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 926.074142] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b42ca4c-101d-4c75-aed6-3b59106d5710 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.081337] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 926.081337] env[69992]: value = "task-2896810" [ 926.081337] env[69992]: _type = "Task" [ 926.081337] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.090389] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896810, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.114258] env[69992]: DEBUG oslo_concurrency.lockutils [req-c5a488ec-e0a9-44c0-9eeb-a83d5fd10561 req-35fc2c16-ec57-4ce6-990b-9c4b198be737 service nova] Releasing lock "refresh_cache-1f9d0558-63fb-4a6f-a2d2-dd7a334249a2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.172202] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.258068] env[69992]: DEBUG nova.compute.manager [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 926.282589] env[69992]: DEBUG nova.virt.hardware [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 926.283064] env[69992]: DEBUG nova.virt.hardware [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 926.283363] env[69992]: DEBUG nova.virt.hardware [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 926.283709] env[69992]: DEBUG nova.virt.hardware [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 926.284040] env[69992]: DEBUG nova.virt.hardware [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 926.284328] env[69992]: DEBUG nova.virt.hardware [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 926.287060] env[69992]: DEBUG nova.virt.hardware [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 926.287060] env[69992]: DEBUG nova.virt.hardware [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 926.287060] 
env[69992]: DEBUG nova.virt.hardware [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 926.287060] env[69992]: DEBUG nova.virt.hardware [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 926.287060] env[69992]: DEBUG nova.virt.hardware [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 926.287218] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-563caedd-cb30-4bf2-a745-0da73e1b45b2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.295216] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c98cb4-75cd-4ed6-b124-6cdb1d31a3ff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.356977] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Acquiring lock "refresh_cache-ab3df643-58db-45b7-a572-9c040135989d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.356977] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Acquired lock "refresh_cache-ab3df643-58db-45b7-a572-9c040135989d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 926.356977] env[69992]: DEBUG nova.network.neutron [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 926.525920] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Task: {'id': task-2896808, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.533010] env[69992]: DEBUG nova.scheduler.client.report [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 926.560604] env[69992]: DEBUG oslo_vmware.api [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Task: {'id': task-2896809, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145155} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.560859] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 926.561148] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 926.561231] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 926.561399] env[69992]: INFO nova.compute.manager [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Took 1.19 seconds to destroy the instance on the hypervisor. [ 926.561637] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 926.561822] env[69992]: DEBUG nova.compute.manager [-] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 926.561915] env[69992]: DEBUG nova.network.neutron [-] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 926.592601] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896810, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.908784] env[69992]: DEBUG nova.network.neutron [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 927.028577] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Task: {'id': task-2896808, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.038601] env[69992]: DEBUG oslo_concurrency.lockutils [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.807s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.039856] env[69992]: DEBUG nova.compute.manager [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 927.046868] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.712s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.048855] env[69992]: INFO nova.compute.claims [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 927.097992] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896810, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.135338] env[69992]: DEBUG nova.compute.manager [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 927.136528] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09422a18-2379-45e8-bbb8-601f28d5fed2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.197112] env[69992]: DEBUG nova.network.neutron [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Updating instance_info_cache with network_info: [{"id": "13280c7f-380d-4f20-b42f-532775cfc598", "address": "fa:16:3e:32:2f:43", "network": {"id": "4cf224aa-0367-450e-b68b-fa2f3f0bc660", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1694183586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1606051f4de9477381d3b1bcea697f5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69351262-8d39-441a-85ba-3a78df436d17", "external-id": "nsx-vlan-transportzone-205", "segmentation_id": 205, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13280c7f-38", "ovs_interfaceid": "13280c7f-380d-4f20-b42f-532775cfc598", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.255737] env[69992]: DEBUG nova.network.neutron [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Successfully updated port: be91de4c-766f-4a66-b07b-2dd3cbe88350 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 927.524931] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Task: {'id': task-2896808, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.362201} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.525231] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2/1f9d0558-63fb-4a6f-a2d2-dd7a334249a2.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 927.525451] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 927.525703] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3975eee5-eb49-4d0b-a1f5-91eead1245b9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.534063] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Waiting for the task: (returnval){ [ 927.534063] env[69992]: value = "task-2896811" [ 927.534063] env[69992]: _type = "Task" [ 927.534063] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.544291] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Task: {'id': task-2896811, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.553750] env[69992]: DEBUG nova.compute.utils [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 927.558202] env[69992]: DEBUG nova.compute.manager [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 927.558383] env[69992]: DEBUG nova.network.neutron [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 927.595126] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896810, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.283975} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.595126] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 00b2fd0b-7841-448d-82cf-436aa8d80cda/00b2fd0b-7841-448d-82cf-436aa8d80cda.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 927.595340] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 927.595603] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-02ee2ac8-c064-4de0-8da4-f44b902dcb85 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.603724] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 927.603724] env[69992]: value = "task-2896812" [ 927.603724] env[69992]: _type = "Task" [ 927.603724] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.612638] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896812, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.614277] env[69992]: DEBUG nova.policy [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f3c3b862089a44a5a0aa4df1353e6630', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '034ac686ad0d438cbe7e56c546f87505', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 927.636966] env[69992]: DEBUG nova.compute.manager [req-905fbcd8-dd45-47da-a5fb-9f6999f23031 req-ffcb99f9-ed28-4db8-b592-55c04960b1b0 service nova] [instance: ab3df643-58db-45b7-a572-9c040135989d] Received event network-vif-plugged-13280c7f-380d-4f20-b42f-532775cfc598 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 927.637248] env[69992]: DEBUG oslo_concurrency.lockutils [req-905fbcd8-dd45-47da-a5fb-9f6999f23031 req-ffcb99f9-ed28-4db8-b592-55c04960b1b0 service nova] Acquiring lock "ab3df643-58db-45b7-a572-9c040135989d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.637625] env[69992]: DEBUG oslo_concurrency.lockutils [req-905fbcd8-dd45-47da-a5fb-9f6999f23031 req-ffcb99f9-ed28-4db8-b592-55c04960b1b0 service nova] Lock "ab3df643-58db-45b7-a572-9c040135989d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.637625] env[69992]: DEBUG oslo_concurrency.lockutils [req-905fbcd8-dd45-47da-a5fb-9f6999f23031 req-ffcb99f9-ed28-4db8-b592-55c04960b1b0 service nova] Lock "ab3df643-58db-45b7-a572-9c040135989d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.637774] env[69992]: DEBUG nova.compute.manager [req-905fbcd8-dd45-47da-a5fb-9f6999f23031 req-ffcb99f9-ed28-4db8-b592-55c04960b1b0 service nova] [instance: ab3df643-58db-45b7-a572-9c040135989d] No waiting events found dispatching network-vif-plugged-13280c7f-380d-4f20-b42f-532775cfc598 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 927.637944] env[69992]: WARNING nova.compute.manager [req-905fbcd8-dd45-47da-a5fb-9f6999f23031 req-ffcb99f9-ed28-4db8-b592-55c04960b1b0 service nova] [instance: ab3df643-58db-45b7-a572-9c040135989d] Received unexpected event network-vif-plugged-13280c7f-380d-4f20-b42f-532775cfc598 for instance with vm_state building and task_state spawning. 
[ 927.638309] env[69992]: DEBUG nova.compute.manager [req-905fbcd8-dd45-47da-a5fb-9f6999f23031 req-ffcb99f9-ed28-4db8-b592-55c04960b1b0 service nova] [instance: ab3df643-58db-45b7-a572-9c040135989d] Received event network-changed-13280c7f-380d-4f20-b42f-532775cfc598 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 927.638435] env[69992]: DEBUG nova.compute.manager [req-905fbcd8-dd45-47da-a5fb-9f6999f23031 req-ffcb99f9-ed28-4db8-b592-55c04960b1b0 service nova] [instance: ab3df643-58db-45b7-a572-9c040135989d] Refreshing instance network info cache due to event network-changed-13280c7f-380d-4f20-b42f-532775cfc598. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 927.638697] env[69992]: DEBUG oslo_concurrency.lockutils [req-905fbcd8-dd45-47da-a5fb-9f6999f23031 req-ffcb99f9-ed28-4db8-b592-55c04960b1b0 service nova] Acquiring lock "refresh_cache-ab3df643-58db-45b7-a572-9c040135989d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.656476] env[69992]: INFO nova.compute.manager [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] instance snapshotting [ 927.659390] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd33825-0220-4c95-a880-f2c02ba38b64 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.682066] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2561a97-72dc-4efa-8646-1c4331551270 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.702552] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Releasing lock "refresh_cache-ab3df643-58db-45b7-a572-9c040135989d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 927.702877] env[69992]: DEBUG nova.compute.manager [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Instance network_info: |[{"id": "13280c7f-380d-4f20-b42f-532775cfc598", "address": "fa:16:3e:32:2f:43", "network": {"id": "4cf224aa-0367-450e-b68b-fa2f3f0bc660", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1694183586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1606051f4de9477381d3b1bcea697f5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69351262-8d39-441a-85ba-3a78df436d17", "external-id": "nsx-vlan-transportzone-205", "segmentation_id": 205, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13280c7f-38", "ovs_interfaceid": "13280c7f-380d-4f20-b42f-532775cfc598", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 927.703187] env[69992]: DEBUG oslo_concurrency.lockutils [req-905fbcd8-dd45-47da-a5fb-9f6999f23031 req-ffcb99f9-ed28-4db8-b592-55c04960b1b0 service nova] Acquired lock "refresh_cache-ab3df643-58db-45b7-a572-9c040135989d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.703373] env[69992]: DEBUG nova.network.neutron [req-905fbcd8-dd45-47da-a5fb-9f6999f23031 req-ffcb99f9-ed28-4db8-b592-55c04960b1b0 service nova] [instance: ab3df643-58db-45b7-a572-9c040135989d] Refreshing network info cache for port 13280c7f-380d-4f20-b42f-532775cfc598 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 927.704572] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:2f:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69351262-8d39-441a-85ba-3a78df436d17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '13280c7f-380d-4f20-b42f-532775cfc598', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 927.715250] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Creating folder: Project (1606051f4de9477381d3b1bcea697f5d). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 927.716577] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fec737d3-9799-4e94-8108-9e34c549001d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.730248] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Created folder: Project (1606051f4de9477381d3b1bcea697f5d) in parent group-v581821. [ 927.730464] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Creating folder: Instances. Parent ref: group-v581902. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 927.730897] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-80fd60be-7a20-4d48-a7a3-b84cf5b28c71 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.741723] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Created folder: Instances in parent group-v581902. 
[ 927.741999] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 927.742218] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab3df643-58db-45b7-a572-9c040135989d] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 927.742467] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-08c75868-dc69-4ee5-86b9-ed744eb8b3db {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.759398] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "refresh_cache-98cd0eb8-d17a-4a9b-a172-1ba1207168d0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.759741] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquired lock "refresh_cache-98cd0eb8-d17a-4a9b-a172-1ba1207168d0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.759741] env[69992]: DEBUG nova.network.neutron [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 927.769571] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 927.769571] env[69992]: value = "task-2896815" [ 927.769571] env[69992]: _type = "Task" [ 927.769571] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.776936] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896815, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.787910] env[69992]: DEBUG nova.network.neutron [-] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.978237] env[69992]: DEBUG nova.compute.manager [req-5adaa962-31bf-485d-8e89-ddf0824d622e req-c0b74e4f-4d4f-4821-874b-594bce078445 service nova] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Received event network-vif-plugged-be91de4c-766f-4a66-b07b-2dd3cbe88350 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 927.978467] env[69992]: DEBUG oslo_concurrency.lockutils [req-5adaa962-31bf-485d-8e89-ddf0824d622e req-c0b74e4f-4d4f-4821-874b-594bce078445 service nova] Acquiring lock "98cd0eb8-d17a-4a9b-a172-1ba1207168d0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.978579] env[69992]: DEBUG oslo_concurrency.lockutils [req-5adaa962-31bf-485d-8e89-ddf0824d622e req-c0b74e4f-4d4f-4821-874b-594bce078445 service nova] Lock "98cd0eb8-d17a-4a9b-a172-1ba1207168d0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.978690] env[69992]: DEBUG oslo_concurrency.lockutils [req-5adaa962-31bf-485d-8e89-ddf0824d622e req-c0b74e4f-4d4f-4821-874b-594bce078445 service nova] Lock "98cd0eb8-d17a-4a9b-a172-1ba1207168d0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.978849] env[69992]: DEBUG nova.compute.manager [req-5adaa962-31bf-485d-8e89-ddf0824d622e req-c0b74e4f-4d4f-4821-874b-594bce078445 service nova] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] No waiting events found dispatching network-vif-plugged-be91de4c-766f-4a66-b07b-2dd3cbe88350 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 927.979104] env[69992]: WARNING nova.compute.manager [req-5adaa962-31bf-485d-8e89-ddf0824d622e req-c0b74e4f-4d4f-4821-874b-594bce078445 service nova] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Received unexpected event network-vif-plugged-be91de4c-766f-4a66-b07b-2dd3cbe88350 for instance with vm_state building and task_state spawning. [ 928.045165] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Task: {'id': task-2896811, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.148892} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.045429] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 928.046215] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8828a0c2-912c-4d0a-8d11-bb175230954e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.063991] env[69992]: DEBUG nova.compute.manager [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 928.075352] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2/1f9d0558-63fb-4a6f-a2d2-dd7a334249a2.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 928.076219] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-408e8584-ffd4-460b-8533-e4504bc1ea70 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.091273] env[69992]: DEBUG nova.network.neutron [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Successfully created port: 0964ccc2-743e-4ab2-bbee-76f6b55f151e {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 928.099918] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Waiting for the task: (returnval){ [ 928.099918] env[69992]: value = "task-2896817" [ 928.099918] env[69992]: _type = "Task" [ 928.099918] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.115996] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Task: {'id': task-2896817, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.122653] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896812, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069608} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.122927] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 928.123693] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73ccdc3-ee07-467a-a255-46337d510e36 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.156091] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] 00b2fd0b-7841-448d-82cf-436aa8d80cda/00b2fd0b-7841-448d-82cf-436aa8d80cda.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 928.156697] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff190e4b-f01f-47ed-81e3-7fd010407b4a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.182709] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 928.182709] env[69992]: value = "task-2896818" [ 928.182709] env[69992]: _type = "Task" [ 928.182709] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.195897] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896818, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.196216] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Creating Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 928.197030] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9bb1fa89-ce93-4935-8f05-c1a1d5765121 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.204497] env[69992]: DEBUG oslo_vmware.api [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 928.204497] env[69992]: value = "task-2896819" [ 928.204497] env[69992]: _type = "Task" [ 928.204497] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.215059] env[69992]: DEBUG oslo_vmware.api [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896819, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.281798] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896815, 'name': CreateVM_Task, 'duration_secs': 0.388481} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.285780] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab3df643-58db-45b7-a572-9c040135989d] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 928.287115] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.287423] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 928.287897] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 928.288403] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-292bbe8b-1e9c-4c69-9614-86039dd1d086 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.291255] env[69992]: INFO nova.compute.manager [-] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Took 1.73 seconds to deallocate network for instance. [ 928.300339] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Waiting for the task: (returnval){ [ 928.300339] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52e7a31f-1e54-bde4-6957-71ab3b2f64ab" [ 928.300339] env[69992]: _type = "Task" [ 928.300339] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.310806] env[69992]: DEBUG nova.network.neutron [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 928.323943] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e7a31f-1e54-bde4-6957-71ab3b2f64ab, 'name': SearchDatastore_Task, 'duration_secs': 0.011138} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.324464] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.324848] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 928.325245] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.325500] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 928.325792] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 928.329870] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-78c7d46d-ca3a-4765-8157-a0adfb37aef6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.344036] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 928.344327] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 928.347576] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ab97b50-e823-4dd7-bacd-dda7d4ef0247 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.354780] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Waiting for the task: (returnval){ [ 928.354780] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]525ac5ad-80c3-b6ae-c5c3-fbad63eb9915" [ 928.354780] env[69992]: _type = "Task" [ 928.354780] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.368300] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525ac5ad-80c3-b6ae-c5c3-fbad63eb9915, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.573891] env[69992]: DEBUG nova.network.neutron [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Updating instance_info_cache with network_info: [{"id": "be91de4c-766f-4a66-b07b-2dd3cbe88350", "address": "fa:16:3e:63:8a:1d", "network": {"id": "8299bcb6-041b-4758-9ef5-52d0357be7aa", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-138938467-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "034ac686ad0d438cbe7e56c546f87505", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe91de4c-76", "ovs_interfaceid": "be91de4c-766f-4a66-b07b-2dd3cbe88350", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.591107] env[69992]: DEBUG nova.network.neutron [req-905fbcd8-dd45-47da-a5fb-9f6999f23031 req-ffcb99f9-ed28-4db8-b592-55c04960b1b0 service nova] [instance: ab3df643-58db-45b7-a572-9c040135989d] Updated VIF entry in instance network info cache for port 13280c7f-380d-4f20-b42f-532775cfc598. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 928.591926] env[69992]: DEBUG nova.network.neutron [req-905fbcd8-dd45-47da-a5fb-9f6999f23031 req-ffcb99f9-ed28-4db8-b592-55c04960b1b0 service nova] [instance: ab3df643-58db-45b7-a572-9c040135989d] Updating instance_info_cache with network_info: [{"id": "13280c7f-380d-4f20-b42f-532775cfc598", "address": "fa:16:3e:32:2f:43", "network": {"id": "4cf224aa-0367-450e-b68b-fa2f3f0bc660", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1694183586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1606051f4de9477381d3b1bcea697f5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69351262-8d39-441a-85ba-3a78df436d17", "external-id": "nsx-vlan-transportzone-205", "segmentation_id": 205, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13280c7f-38", "ovs_interfaceid": "13280c7f-380d-4f20-b42f-532775cfc598", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.611149] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Task: {'id': task-2896817, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.697245] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896818, 'name': ReconfigVM_Task, 'duration_secs': 0.314119} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.697517] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Reconfigured VM instance instance-00000015 to attach disk [datastore1] 00b2fd0b-7841-448d-82cf-436aa8d80cda/00b2fd0b-7841-448d-82cf-436aa8d80cda.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 928.698166] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fee099b1-36ca-446f-88cc-78d1afb872ae {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.711044] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 928.711044] env[69992]: value = "task-2896820" [ 928.711044] env[69992]: _type = "Task" [ 928.711044] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.717390] env[69992]: DEBUG oslo_vmware.api [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896819, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.722607] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896820, 'name': Rename_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.737409] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbca4c8c-5f7f-4651-ac29-f368710c35a3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.746211] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c1dbf5-face-4a4c-9cfa-e1571b9153e2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.779439] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe849654-9f64-4747-b2e4-8ed662c0f30d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.787976] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb2c00d-8384-4328-9dee-5bcfeaaa7189 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.805379] env[69992]: DEBUG oslo_concurrency.lockutils [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 928.805640] env[69992]: DEBUG nova.compute.provider_tree [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.865168] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525ac5ad-80c3-b6ae-c5c3-fbad63eb9915, 'name': SearchDatastore_Task, 'duration_secs': 0.010762} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.866165] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f70d418e-5aa9-4e35-a38f-83388ff2e8ed {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.871638] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Waiting for the task: (returnval){ [ 928.871638] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52764355-3def-b81c-2268-6317f007ba79" [ 928.871638] env[69992]: _type = "Task" [ 928.871638] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.879778] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52764355-3def-b81c-2268-6317f007ba79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.076439] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Releasing lock "refresh_cache-98cd0eb8-d17a-4a9b-a172-1ba1207168d0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.076780] env[69992]: DEBUG nova.compute.manager [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Instance network_info: |[{"id": "be91de4c-766f-4a66-b07b-2dd3cbe88350", "address": "fa:16:3e:63:8a:1d", "network": {"id": "8299bcb6-041b-4758-9ef5-52d0357be7aa", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-138938467-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "034ac686ad0d438cbe7e56c546f87505", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe91de4c-76", "ovs_interfaceid": "be91de4c-766f-4a66-b07b-2dd3cbe88350", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 929.077275] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:8a:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee20e439-fed9-490e-97dd-f3c886977ae1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'be91de4c-766f-4a66-b07b-2dd3cbe88350', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 929.085946] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Creating folder: Project (034ac686ad0d438cbe7e56c546f87505). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 929.087038] env[69992]: DEBUG nova.compute.manager [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 929.089034] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5d82a21-6ca9-45d1-94d2-107e526a17b8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.094170] env[69992]: DEBUG oslo_concurrency.lockutils [req-905fbcd8-dd45-47da-a5fb-9f6999f23031 req-ffcb99f9-ed28-4db8-b592-55c04960b1b0 service nova] Releasing lock "refresh_cache-ab3df643-58db-45b7-a572-9c040135989d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.094419] env[69992]: DEBUG nova.compute.manager [req-905fbcd8-dd45-47da-a5fb-9f6999f23031 req-ffcb99f9-ed28-4db8-b592-55c04960b1b0 service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Received event network-changed-1f86db68-8a81-421c-aa9b-4daab0584c4c {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 929.094587] env[69992]: DEBUG nova.compute.manager [req-905fbcd8-dd45-47da-a5fb-9f6999f23031 req-ffcb99f9-ed28-4db8-b592-55c04960b1b0 service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Refreshing instance network info cache due to event network-changed-1f86db68-8a81-421c-aa9b-4daab0584c4c. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 929.094836] env[69992]: DEBUG oslo_concurrency.lockutils [req-905fbcd8-dd45-47da-a5fb-9f6999f23031 req-ffcb99f9-ed28-4db8-b592-55c04960b1b0 service nova] Acquiring lock "refresh_cache-ee4c0f2b-44cb-4b37-8e4a-5706b9932144" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.095017] env[69992]: DEBUG oslo_concurrency.lockutils [req-905fbcd8-dd45-47da-a5fb-9f6999f23031 req-ffcb99f9-ed28-4db8-b592-55c04960b1b0 service nova] Acquired lock "refresh_cache-ee4c0f2b-44cb-4b37-8e4a-5706b9932144" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.095194] env[69992]: DEBUG nova.network.neutron [req-905fbcd8-dd45-47da-a5fb-9f6999f23031 req-ffcb99f9-ed28-4db8-b592-55c04960b1b0 service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Refreshing network info cache for port 1f86db68-8a81-421c-aa9b-4daab0584c4c {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 929.106555] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Created folder: Project (034ac686ad0d438cbe7e56c546f87505) in parent group-v581821. [ 929.106752] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Creating folder: Instances. Parent ref: group-v581906. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 929.107895] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f4b39e1f-0eca-44de-bf03-e0fc28d09740 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.115576] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Task: {'id': task-2896817, 'name': ReconfigVM_Task, 'duration_secs': 0.943082} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.116009] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Reconfigured VM instance instance-00000018 to attach disk [datastore1] 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2/1f9d0558-63fb-4a6f-a2d2-dd7a334249a2.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 929.116464] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2d2325c2-7600-4719-a3ae-de4c0eee203b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.121849] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Created folder: Instances in parent group-v581906. 
[ 929.121957] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 929.122157] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 929.122352] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-765f2d8d-8711-4e6f-b6da-ad6959cb2169 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.141395] env[69992]: DEBUG nova.virt.hardware [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 929.141657] env[69992]: DEBUG nova.virt.hardware [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 929.141818] env[69992]: DEBUG nova.virt.hardware [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 929.141998] env[69992]: DEBUG nova.virt.hardware [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 929.142156] env[69992]: DEBUG nova.virt.hardware [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 929.142301] env[69992]: DEBUG nova.virt.hardware [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 929.142507] env[69992]: DEBUG nova.virt.hardware [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 929.142660] env[69992]: DEBUG nova.virt.hardware [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 929.142822] env[69992]: DEBUG nova.virt.hardware [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 929.142980] env[69992]: DEBUG nova.virt.hardware [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 929.143399] env[69992]: DEBUG nova.virt.hardware [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 929.144130] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Waiting for the task: (returnval){ [ 929.144130] env[69992]: value = "task-2896823" [ 929.144130] env[69992]: _type = "Task" [ 929.144130] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.144830] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c45ac7e7-cff9-47bf-adea-d459339afd49 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.154372] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 929.154372] env[69992]: value = "task-2896824" [ 929.154372] env[69992]: _type = "Task" [ 929.154372] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.159357] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Task: {'id': task-2896823, 'name': Rename_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.163187] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed4bc130-8738-4777-831c-322efb771bfc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.172703] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896824, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.218324] env[69992]: DEBUG oslo_vmware.api [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896819, 'name': CreateSnapshot_Task, 'duration_secs': 0.564458} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.219134] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Created Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 929.219958] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-636f7cc4-8a92-4818-b797-cf0c8cf8d328 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.226166] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896820, 'name': Rename_Task, 'duration_secs': 0.152178} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.227140] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 929.227206] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e72e3d4c-cb22-457b-9689-05fa41d37ec6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.240859] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 929.240859] env[69992]: value = "task-2896825" [ 929.240859] env[69992]: _type = "Task" [ 929.240859] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.249989] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896825, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.310068] env[69992]: DEBUG nova.scheduler.client.report [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 929.382107] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52764355-3def-b81c-2268-6317f007ba79, 'name': SearchDatastore_Task, 'duration_secs': 0.009257} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.382389] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.382653] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] ab3df643-58db-45b7-a572-9c040135989d/ab3df643-58db-45b7-a572-9c040135989d.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 929.382915] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee857861-9406-4eb2-b2de-b9d151bcf8f1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.391077] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Waiting for the task: (returnval){ [ 929.391077] env[69992]: value = "task-2896826" [ 929.391077] env[69992]: _type = "Task" [ 929.391077] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.405344] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': task-2896826, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.658964] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Task: {'id': task-2896823, 'name': Rename_Task, 'duration_secs': 0.232116} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.659370] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 929.662623] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-752aee73-e57f-4442-b14b-b1bbd9e85aa3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.673610] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896824, 'name': CreateVM_Task, 'duration_secs': 0.367358} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.677595] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 929.678094] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Waiting for the task: (returnval){ [ 929.678094] env[69992]: value = "task-2896827" [ 929.678094] env[69992]: _type = "Task" [ 929.678094] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.678929] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.679072] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.679670] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 929.680141] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6595f14-54b3-4509-b717-ff4b8374c548 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.692328] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Task: {'id': task-2896827, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.693720] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 929.693720] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5211fb2a-36cc-dc2b-04dc-bd8c4daff2d6" [ 929.693720] env[69992]: _type = "Task" [ 929.693720] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.707998] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5211fb2a-36cc-dc2b-04dc-bd8c4daff2d6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.743542] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Creating linked-clone VM from snapshot {{(pid=69992) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 929.743953] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e2db749a-8d2b-41a9-8c62-59bbea108c40 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.758513] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896825, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.760736] env[69992]: DEBUG oslo_vmware.api [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 929.760736] env[69992]: value = "task-2896828" [ 929.760736] env[69992]: _type = "Task" [ 929.760736] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.774820] env[69992]: DEBUG oslo_vmware.api [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896828, 'name': CloneVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.814888] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.768s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.815551] env[69992]: DEBUG nova.compute.manager [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 929.818454] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.908s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.818628] env[69992]: DEBUG nova.objects.instance [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Lazy-loading 'resources' on Instance uuid c1d73002-6e69-41a6-95b3-34dccaf872ef {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 929.842081] env[69992]: DEBUG nova.network.neutron [req-905fbcd8-dd45-47da-a5fb-9f6999f23031 req-ffcb99f9-ed28-4db8-b592-55c04960b1b0 service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Updated VIF entry in instance network info cache for port 1f86db68-8a81-421c-aa9b-4daab0584c4c. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 929.842472] env[69992]: DEBUG nova.network.neutron [req-905fbcd8-dd45-47da-a5fb-9f6999f23031 req-ffcb99f9-ed28-4db8-b592-55c04960b1b0 service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Updating instance_info_cache with network_info: [{"id": "1f86db68-8a81-421c-aa9b-4daab0584c4c", "address": "fa:16:3e:1d:47:c9", "network": {"id": "3e77044c-b2d9-4469-8bae-4dbd1f752c9c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-482235377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da546e986828460e958e2eed165bf47e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f86db68-8a", "ovs_interfaceid": "1f86db68-8a81-421c-aa9b-4daab0584c4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.905171] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': task-2896826, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.193055] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Task: {'id': task-2896827, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.205892] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5211fb2a-36cc-dc2b-04dc-bd8c4daff2d6, 'name': SearchDatastore_Task, 'duration_secs': 0.062143} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.206248] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.206480] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 930.206728] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.206989] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.207165] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 930.207465] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e78d169-a3ff-48c6-b63f-4adeab9ce8a7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.232586] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 930.232825] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 930.233588] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-440fab10-6cae-4689-9170-af38f03bf2d2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.240610] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 930.240610] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]527afe31-3f9f-36ab-9b05-943379df54c2" [ 930.240610] env[69992]: _type = "Task" [ 930.240610] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.254517] env[69992]: DEBUG oslo_vmware.api [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896825, 'name': PowerOnVM_Task, 'duration_secs': 0.598186} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.258047] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 930.258354] env[69992]: DEBUG nova.compute.manager [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 930.258698] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]527afe31-3f9f-36ab-9b05-943379df54c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.259555] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e619da7-7b84-4442-826f-442b37f8ed7a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.271907] env[69992]: DEBUG nova.network.neutron [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Successfully updated port: 0964ccc2-743e-4ab2-bbee-76f6b55f151e {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 930.279354] env[69992]: DEBUG oslo_vmware.api [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896828, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.321671] env[69992]: DEBUG nova.compute.utils [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 930.327040] env[69992]: DEBUG nova.compute.manager [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 930.327040] env[69992]: DEBUG nova.network.neutron [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 930.347410] env[69992]: DEBUG oslo_concurrency.lockutils [req-905fbcd8-dd45-47da-a5fb-9f6999f23031 req-ffcb99f9-ed28-4db8-b592-55c04960b1b0 service nova] Releasing lock "refresh_cache-ee4c0f2b-44cb-4b37-8e4a-5706b9932144" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.404083] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': task-2896826, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.584341} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.404394] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] ab3df643-58db-45b7-a572-9c040135989d/ab3df643-58db-45b7-a572-9c040135989d.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 930.404603] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 930.404989] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e06aaf0-e8f5-40e9-8e76-00418429b0a2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.415367] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Waiting for the task: (returnval){ [ 930.415367] env[69992]: value = "task-2896830" [ 930.415367] env[69992]: _type = "Task" [ 930.415367] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.423870] env[69992]: DEBUG nova.policy [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f3c3b862089a44a5a0aa4df1353e6630', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '034ac686ad0d438cbe7e56c546f87505', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 930.433251] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': task-2896830, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.692609] env[69992]: DEBUG oslo_vmware.api [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Task: {'id': task-2896827, 'name': PowerOnVM_Task, 'duration_secs': 0.93959} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.693372] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 930.693477] env[69992]: INFO nova.compute.manager [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Took 10.53 seconds to spawn the instance on the hypervisor. [ 930.693814] env[69992]: DEBUG nova.compute.manager [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 930.697475] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2fd3161-d985-41e4-b9c8-fc56993c84f8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.756333] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]527afe31-3f9f-36ab-9b05-943379df54c2, 'name': SearchDatastore_Task, 'duration_secs': 0.015965} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.759886] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-107d5a5f-bb78-4af7-94f3-f9284730a619 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.768698] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 930.768698] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52e59f58-7b1c-4b76-8244-8194c7fc1e06" [ 930.768698] env[69992]: _type = "Task" [ 930.768698] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.776558] env[69992]: DEBUG oslo_concurrency.lockutils [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "refresh_cache-7fbab19d-5a0a-4da3-b078-40ca0eaf8c97" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.776558] env[69992]: DEBUG oslo_concurrency.lockutils [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquired lock "refresh_cache-7fbab19d-5a0a-4da3-b078-40ca0eaf8c97" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.776558] env[69992]: DEBUG nova.network.neutron [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 930.781964] env[69992]: DEBUG oslo_vmware.api [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896828, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.787562] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.793179] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e59f58-7b1c-4b76-8244-8194c7fc1e06, 'name': SearchDatastore_Task, 'duration_secs': 0.015227} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.795049] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.795049] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 98cd0eb8-d17a-4a9b-a172-1ba1207168d0/98cd0eb8-d17a-4a9b-a172-1ba1207168d0.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 930.796716] env[69992]: DEBUG nova.compute.manager [req-8b5ab814-b01d-4aa7-996a-026a17467d89 req-67e9915b-e0c7-4682-a9b7-b5aaab0ce5db service nova] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Received event network-vif-deleted-58835cdc-7ea4-4647-9a86-35f7cb486922 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 930.798455] env[69992]: DEBUG nova.compute.manager [req-8b5ab814-b01d-4aa7-996a-026a17467d89 req-67e9915b-e0c7-4682-a9b7-b5aaab0ce5db service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Received event network-changed-1f86db68-8a81-421c-aa9b-4daab0584c4c {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 930.798455] env[69992]: DEBUG nova.compute.manager [req-8b5ab814-b01d-4aa7-996a-026a17467d89 req-67e9915b-e0c7-4682-a9b7-b5aaab0ce5db service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Refreshing instance network info cache due to event network-changed-1f86db68-8a81-421c-aa9b-4daab0584c4c. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 930.798455] env[69992]: DEBUG oslo_concurrency.lockutils [req-8b5ab814-b01d-4aa7-996a-026a17467d89 req-67e9915b-e0c7-4682-a9b7-b5aaab0ce5db service nova] Acquiring lock "refresh_cache-ee4c0f2b-44cb-4b37-8e4a-5706b9932144" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.798455] env[69992]: DEBUG oslo_concurrency.lockutils [req-8b5ab814-b01d-4aa7-996a-026a17467d89 req-67e9915b-e0c7-4682-a9b7-b5aaab0ce5db service nova] Acquired lock "refresh_cache-ee4c0f2b-44cb-4b37-8e4a-5706b9932144" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.798455] env[69992]: DEBUG nova.network.neutron [req-8b5ab814-b01d-4aa7-996a-026a17467d89 req-67e9915b-e0c7-4682-a9b7-b5aaab0ce5db service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Refreshing network info cache for port 1f86db68-8a81-421c-aa9b-4daab0584c4c {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 930.802182] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-abb972ee-67c3-45c4-bd8a-fafa3ceb6a90 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.813094] env[69992]: DEBUG nova.compute.manager [req-e238141d-2d5e-4eca-a870-a78a985b87b0 req-cdbed207-57f6-4884-8e67-6d676b03861f service nova] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Received event network-changed-be91de4c-766f-4a66-b07b-2dd3cbe88350 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 930.814097] env[69992]: DEBUG nova.compute.manager [req-e238141d-2d5e-4eca-a870-a78a985b87b0 req-cdbed207-57f6-4884-8e67-6d676b03861f service nova] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Refreshing instance network info cache due to event network-changed-be91de4c-766f-4a66-b07b-2dd3cbe88350. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 930.814097] env[69992]: DEBUG oslo_concurrency.lockutils [req-e238141d-2d5e-4eca-a870-a78a985b87b0 req-cdbed207-57f6-4884-8e67-6d676b03861f service nova] Acquiring lock "refresh_cache-98cd0eb8-d17a-4a9b-a172-1ba1207168d0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.814097] env[69992]: DEBUG oslo_concurrency.lockutils [req-e238141d-2d5e-4eca-a870-a78a985b87b0 req-cdbed207-57f6-4884-8e67-6d676b03861f service nova] Acquired lock "refresh_cache-98cd0eb8-d17a-4a9b-a172-1ba1207168d0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.814097] env[69992]: DEBUG nova.network.neutron [req-e238141d-2d5e-4eca-a870-a78a985b87b0 req-cdbed207-57f6-4884-8e67-6d676b03861f service nova] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Refreshing network info cache for port be91de4c-766f-4a66-b07b-2dd3cbe88350 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 930.817178] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 930.817178] env[69992]: value = "task-2896831" [ 930.817178] env[69992]: _type = "Task" [ 930.817178] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.830514] env[69992]: DEBUG nova.compute.manager [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 930.833796] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896831, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.927993] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': task-2896830, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096571} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.928427] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 930.929346] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb26e886-d709-46f2-b4f4-0e7b1abad61f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.952149] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] ab3df643-58db-45b7-a572-9c040135989d/ab3df643-58db-45b7-a572-9c040135989d.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 930.955312] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-613844a4-94f4-403d-9117-3345cd112069 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.978208] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Waiting for the task: (returnval){ [ 930.978208] env[69992]: value = "task-2896832" [ 930.978208] env[69992]: _type = "Task" [ 930.978208] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.989882] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': task-2896832, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.006970] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa6f1ba-9300-4c1b-9d5d-df52588222b3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.015858] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985f36d3-b0a0-4b71-bf73-01552c37dbd0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.049410] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea20cd0-64ff-42e1-803b-2600ed6f1676 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.058233] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4417f691-39e8-49ef-81d5-e4195269931c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.073224] env[69992]: DEBUG nova.compute.provider_tree [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 931.220089] env[69992]: INFO nova.compute.manager [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Took 39.80 seconds to build instance. [ 931.276769] env[69992]: DEBUG oslo_vmware.api [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896828, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.325670] env[69992]: DEBUG nova.network.neutron [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Successfully created port: 1d0c36a5-d435-4818-94a5-17eca575ea26 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 931.338641] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896831, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.370030] env[69992]: DEBUG nova.network.neutron [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 931.490851] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': task-2896832, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.576843] env[69992]: DEBUG nova.scheduler.client.report [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 931.722729] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8384ed53-9a57-4df1-89d6-94768b35f047 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Lock "1f9d0558-63fb-4a6f-a2d2-dd7a334249a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.459s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.759756] env[69992]: DEBUG nova.network.neutron [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Updating instance_info_cache with network_info: [{"id": "0964ccc2-743e-4ab2-bbee-76f6b55f151e", "address": "fa:16:3e:12:21:b9", "network": {"id": "8299bcb6-041b-4758-9ef5-52d0357be7aa", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-138938467-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "034ac686ad0d438cbe7e56c546f87505", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0964ccc2-74", "ovs_interfaceid": "0964ccc2-743e-4ab2-bbee-76f6b55f151e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.777990] env[69992]: DEBUG oslo_vmware.api [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896828, 'name': CloneVM_Task} progress is 95%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.815754] env[69992]: DEBUG nova.network.neutron [req-8b5ab814-b01d-4aa7-996a-026a17467d89 req-67e9915b-e0c7-4682-a9b7-b5aaab0ce5db service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Updated VIF entry in instance network info cache for port 1f86db68-8a81-421c-aa9b-4daab0584c4c. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 931.816166] env[69992]: DEBUG nova.network.neutron [req-8b5ab814-b01d-4aa7-996a-026a17467d89 req-67e9915b-e0c7-4682-a9b7-b5aaab0ce5db service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Updating instance_info_cache with network_info: [{"id": "1f86db68-8a81-421c-aa9b-4daab0584c4c", "address": "fa:16:3e:1d:47:c9", "network": {"id": "3e77044c-b2d9-4469-8bae-4dbd1f752c9c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-482235377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da546e986828460e958e2eed165bf47e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f86db68-8a", "ovs_interfaceid": "1f86db68-8a81-421c-aa9b-4daab0584c4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.822355] env[69992]: DEBUG nova.network.neutron [req-e238141d-2d5e-4eca-a870-a78a985b87b0 req-cdbed207-57f6-4884-8e67-6d676b03861f service nova] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Updated VIF entry in instance network info cache for port be91de4c-766f-4a66-b07b-2dd3cbe88350. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 931.822355] env[69992]: DEBUG nova.network.neutron [req-e238141d-2d5e-4eca-a870-a78a985b87b0 req-cdbed207-57f6-4884-8e67-6d676b03861f service nova] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Updating instance_info_cache with network_info: [{"id": "be91de4c-766f-4a66-b07b-2dd3cbe88350", "address": "fa:16:3e:63:8a:1d", "network": {"id": "8299bcb6-041b-4758-9ef5-52d0357be7aa", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-138938467-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "034ac686ad0d438cbe7e56c546f87505", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe91de4c-76", "ovs_interfaceid": "be91de4c-766f-4a66-b07b-2dd3cbe88350", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.841815] env[69992]: DEBUG nova.compute.manager [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 931.847018] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896831, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.731186} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.847018] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 98cd0eb8-d17a-4a9b-a172-1ba1207168d0/98cd0eb8-d17a-4a9b-a172-1ba1207168d0.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 931.847018] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 931.847018] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0418d471-fb50-4a73-9052-4997ee7ac67e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.856853] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 931.856853] env[69992]: value = "task-2896833" [ 931.856853] env[69992]: _type = "Task" [ 931.856853] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.868541] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896833, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.875068] env[69992]: DEBUG nova.virt.hardware [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 931.875347] env[69992]: DEBUG nova.virt.hardware [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 931.875518] env[69992]: DEBUG nova.virt.hardware [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 931.875700] env[69992]: DEBUG nova.virt.hardware [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 931.875906] env[69992]: DEBUG nova.virt.hardware [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 931.876258] env[69992]: DEBUG nova.virt.hardware [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 931.876331] env[69992]: DEBUG nova.virt.hardware [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 931.876438] env[69992]: DEBUG nova.virt.hardware [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
931.876605] env[69992]: DEBUG nova.virt.hardware [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 931.876768] env[69992]: DEBUG nova.virt.hardware [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 931.876941] env[69992]: DEBUG nova.virt.hardware [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 931.878096] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21dcd51e-630b-4252-b551-6109dfe22d28 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.887104] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c7454e5-aba7-4ad8-acf2-3ea491451213 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.990814] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': task-2896832, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.083048] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.265s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.085920] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.869s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 932.088947] env[69992]: INFO nova.compute.claims [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 932.123163] env[69992]: INFO nova.scheduler.client.report [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Deleted allocations for instance c1d73002-6e69-41a6-95b3-34dccaf872ef [ 932.228120] env[69992]: DEBUG nova.compute.manager [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 932.264716] env[69992]: DEBUG oslo_concurrency.lockutils [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Releasing lock "refresh_cache-7fbab19d-5a0a-4da3-b078-40ca0eaf8c97" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.265081] env[69992]: DEBUG nova.compute.manager [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Instance network_info: |[{"id": "0964ccc2-743e-4ab2-bbee-76f6b55f151e", "address": "fa:16:3e:12:21:b9", "network": {"id": "8299bcb6-041b-4758-9ef5-52d0357be7aa", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-138938467-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "034ac686ad0d438cbe7e56c546f87505", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0964ccc2-74", "ovs_interfaceid": "0964ccc2-743e-4ab2-bbee-76f6b55f151e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 932.265757] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:21:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee20e439-fed9-490e-97dd-f3c886977ae1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0964ccc2-743e-4ab2-bbee-76f6b55f151e', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 932.274112] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 932.275024] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 932.279874] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-13858906-cd0c-4d63-8593-461d3bb5f990 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.308520] env[69992]: DEBUG oslo_vmware.api [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896828, 'name': CloneVM_Task, 'duration_secs': 2.356371} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.310388] env[69992]: INFO nova.virt.vmwareapi.vmops [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Created linked-clone VM from snapshot [ 932.310611] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 932.310611] env[69992]: value = "task-2896835" [ 932.310611] env[69992]: _type = "Task" [ 932.310611] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.311342] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a322c92-0332-4ca6-ab10-8a2a6d4f6ba9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.320543] env[69992]: DEBUG oslo_concurrency.lockutils [req-8b5ab814-b01d-4aa7-996a-026a17467d89 req-67e9915b-e0c7-4682-a9b7-b5aaab0ce5db service nova] Releasing lock "refresh_cache-ee4c0f2b-44cb-4b37-8e4a-5706b9932144" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.320805] env[69992]: DEBUG nova.compute.manager [req-8b5ab814-b01d-4aa7-996a-026a17467d89 req-67e9915b-e0c7-4682-a9b7-b5aaab0ce5db service nova] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Received event network-changed-59bdab57-cc8d-40d6-90b5-e7582a1f1500 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 932.320993] env[69992]: DEBUG nova.compute.manager [req-8b5ab814-b01d-4aa7-996a-026a17467d89 req-67e9915b-e0c7-4682-a9b7-b5aaab0ce5db service nova] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Refreshing instance network info cache due to event network-changed-59bdab57-cc8d-40d6-90b5-e7582a1f1500. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 932.321223] env[69992]: DEBUG oslo_concurrency.lockutils [req-8b5ab814-b01d-4aa7-996a-026a17467d89 req-67e9915b-e0c7-4682-a9b7-b5aaab0ce5db service nova] Acquiring lock "refresh_cache-bf75484e-4020-48f7-9419-bd88d0462b90" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.321367] env[69992]: DEBUG oslo_concurrency.lockutils [req-8b5ab814-b01d-4aa7-996a-026a17467d89 req-67e9915b-e0c7-4682-a9b7-b5aaab0ce5db service nova] Acquired lock "refresh_cache-bf75484e-4020-48f7-9419-bd88d0462b90" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 932.321528] env[69992]: DEBUG nova.network.neutron [req-8b5ab814-b01d-4aa7-996a-026a17467d89 req-67e9915b-e0c7-4682-a9b7-b5aaab0ce5db service nova] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Refreshing network info cache for port 59bdab57-cc8d-40d6-90b5-e7582a1f1500 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 932.327167] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Uploading image 97dcfec5-7f0a-4cf9-a9e8-12828f18d9c7 {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 932.335880] env[69992]: DEBUG oslo_concurrency.lockutils [req-e238141d-2d5e-4eca-a870-a78a985b87b0 req-cdbed207-57f6-4884-8e67-6d676b03861f service nova] Releasing lock "refresh_cache-98cd0eb8-d17a-4a9b-a172-1ba1207168d0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.335880] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896835, 'name': CreateVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.363561] env[69992]: DEBUG oslo_vmware.rw_handles [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 932.363561] env[69992]: value = "vm-581909" [ 932.363561] env[69992]: _type = "VirtualMachine" [ 932.363561] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 932.363879] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-7e980431-f05f-4c63-85e1-5034ff829b6e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.372065] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896833, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.141542} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.373381] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 932.373756] env[69992]: DEBUG oslo_vmware.rw_handles [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lease: (returnval){ [ 932.373756] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d2e6db-45f6-976e-de90-24ee27385ea1" [ 932.373756] env[69992]: _type = "HttpNfcLease" [ 932.373756] env[69992]: } obtained for exporting VM: (result){ [ 932.373756] env[69992]: value = "vm-581909" [ 932.373756] env[69992]: _type = "VirtualMachine" [ 932.373756] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 932.373959] env[69992]: DEBUG oslo_vmware.api [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the lease: (returnval){ [ 932.373959] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d2e6db-45f6-976e-de90-24ee27385ea1" [ 932.373959] env[69992]: _type = "HttpNfcLease" [ 932.373959] env[69992]: } to be ready. {{(pid=69992) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 932.374746] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e486ee-e184-4d57-8b0e-2c7bfdb4580a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.384829] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 932.384829] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d2e6db-45f6-976e-de90-24ee27385ea1" [ 932.384829] env[69992]: _type = "HttpNfcLease" [ 932.384829] env[69992]: } is initializing. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 932.403760] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] 98cd0eb8-d17a-4a9b-a172-1ba1207168d0/98cd0eb8-d17a-4a9b-a172-1ba1207168d0.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 932.404527] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-229e2df2-ae9b-45d0-9700-5c03f41b9785 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.425940] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 932.425940] env[69992]: value = "task-2896837" [ 932.425940] env[69992]: _type = "Task" [ 932.425940] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.435403] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896837, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.491530] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': task-2896832, 'name': ReconfigVM_Task, 'duration_secs': 1.046029} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.491846] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Reconfigured VM instance instance-00000019 to attach disk [datastore1] ab3df643-58db-45b7-a572-9c040135989d/ab3df643-58db-45b7-a572-9c040135989d.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 932.492705] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-45a5c536-87b1-4a79-8b0f-b731d6485c5e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.502253] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Waiting for the task: (returnval){ [ 932.502253] env[69992]: value = "task-2896838" [ 932.502253] env[69992]: _type = "Task" [ 932.502253] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.513861] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': task-2896838, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.636866] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82022f36-bf61-4ba9-80c5-372e51103779 tempest-ServerDiagnosticsNegativeTest-1117125106 tempest-ServerDiagnosticsNegativeTest-1117125106-project-member] Lock "c1d73002-6e69-41a6-95b3-34dccaf872ef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.209s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.754727] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 932.826323] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896835, 'name': CreateVM_Task, 'duration_secs': 0.386378} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.826746] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 932.831098] env[69992]: DEBUG oslo_concurrency.lockutils [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.831098] env[69992]: DEBUG oslo_concurrency.lockutils [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 932.831098] env[69992]: DEBUG oslo_concurrency.lockutils [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 932.831098] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b246ea1-75d9-41fc-8bda-f0d434d2d38b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.836742] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 932.836742] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52b79e34-01bf-4e33-229d-209b7e234805" [ 932.836742] env[69992]: _type = "Task" [ 932.836742] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.846205] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b79e34-01bf-4e33-229d-209b7e234805, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.886192] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 932.886192] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d2e6db-45f6-976e-de90-24ee27385ea1" [ 932.886192] env[69992]: _type = "HttpNfcLease" [ 932.886192] env[69992]: } is ready. 
{{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 932.886831] env[69992]: DEBUG oslo_vmware.rw_handles [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 932.886831] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d2e6db-45f6-976e-de90-24ee27385ea1" [ 932.886831] env[69992]: _type = "HttpNfcLease" [ 932.886831] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 932.887423] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d6cbf09-f60d-49cc-a212-5d5e32025420 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.895804] env[69992]: DEBUG oslo_vmware.rw_handles [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521390bd-7c0d-fc08-8838-76749e2ae773/disk-0.vmdk from lease info. {{(pid=69992) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 932.895981] env[69992]: DEBUG oslo_vmware.rw_handles [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521390bd-7c0d-fc08-8838-76749e2ae773/disk-0.vmdk for reading. {{(pid=69992) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 932.976754] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896837, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.012693] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': task-2896838, 'name': Rename_Task, 'duration_secs': 0.497621} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.015396] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 933.019881] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc34e20e-563b-471f-b00e-c90c75e577d0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.020499] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b4bf6581-e784-45cc-a9ed-59aac809ff25 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.029927] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Waiting for the task: (returnval){ [ 933.029927] env[69992]: value = "task-2896839" [ 933.029927] env[69992]: _type = "Task" [ 933.029927] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.041695] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': task-2896839, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.160296] env[69992]: DEBUG nova.network.neutron [req-8b5ab814-b01d-4aa7-996a-026a17467d89 req-67e9915b-e0c7-4682-a9b7-b5aaab0ce5db service nova] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Updated VIF entry in instance network info cache for port 59bdab57-cc8d-40d6-90b5-e7582a1f1500. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 933.160610] env[69992]: DEBUG nova.network.neutron [req-8b5ab814-b01d-4aa7-996a-026a17467d89 req-67e9915b-e0c7-4682-a9b7-b5aaab0ce5db service nova] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Updating instance_info_cache with network_info: [{"id": "59bdab57-cc8d-40d6-90b5-e7582a1f1500", "address": "fa:16:3e:e6:8b:79", "network": {"id": "3e77044c-b2d9-4469-8bae-4dbd1f752c9c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-482235377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da546e986828460e958e2eed165bf47e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59bdab57-cc", "ovs_interfaceid": "59bdab57-cc8d-40d6-90b5-e7582a1f1500", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.351491] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b79e34-01bf-4e33-229d-209b7e234805, 'name': SearchDatastore_Task, 'duration_secs': 0.025583} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.351822] env[69992]: DEBUG oslo_concurrency.lockutils [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 933.352148] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 933.352424] env[69992]: DEBUG oslo_concurrency.lockutils [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.352575] env[69992]: DEBUG oslo_concurrency.lockutils [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.352755] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 933.353040] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c53a1d48-c81f-41f8-ad35-57063011ee26 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.370519] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 933.370719] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 933.372264] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ab54e2c-41b6-426c-819c-e22410656656 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.379369] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 933.379369] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52beafab-2a17-ba45-f2d1-b4f8620bffd4" [ 933.379369] env[69992]: _type = "Task" [ 933.379369] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.394019] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52beafab-2a17-ba45-f2d1-b4f8620bffd4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.473116] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896837, 'name': ReconfigVM_Task, 'duration_secs': 0.619449} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.473116] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Reconfigured VM instance instance-0000001a to attach disk [datastore1] 98cd0eb8-d17a-4a9b-a172-1ba1207168d0/98cd0eb8-d17a-4a9b-a172-1ba1207168d0.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 933.473116] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5fd26d70-c583-4727-b20b-b162f2115762 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.483436] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 933.483436] env[69992]: value = "task-2896840" [ 933.483436] env[69992]: _type = "Task" [ 933.483436] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.494323] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896840, 'name': Rename_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.550761] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': task-2896839, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.564440] env[69992]: DEBUG nova.compute.manager [req-82b778a2-b19a-4f5f-9773-7b8b87256e50 req-08e4ba4c-16af-4446-b963-5d8e42025b9a service nova] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Received event network-vif-plugged-0964ccc2-743e-4ab2-bbee-76f6b55f151e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 933.566545] env[69992]: DEBUG oslo_concurrency.lockutils [req-82b778a2-b19a-4f5f-9773-7b8b87256e50 req-08e4ba4c-16af-4446-b963-5d8e42025b9a service nova] Acquiring lock "7fbab19d-5a0a-4da3-b078-40ca0eaf8c97-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.566776] env[69992]: DEBUG oslo_concurrency.lockutils [req-82b778a2-b19a-4f5f-9773-7b8b87256e50 req-08e4ba4c-16af-4446-b963-5d8e42025b9a service nova] Lock "7fbab19d-5a0a-4da3-b078-40ca0eaf8c97-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.566979] env[69992]: DEBUG oslo_concurrency.lockutils [req-82b778a2-b19a-4f5f-9773-7b8b87256e50 req-08e4ba4c-16af-4446-b963-5d8e42025b9a service nova] Lock "7fbab19d-5a0a-4da3-b078-40ca0eaf8c97-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.567130] env[69992]: DEBUG nova.compute.manager [req-82b778a2-b19a-4f5f-9773-7b8b87256e50 req-08e4ba4c-16af-4446-b963-5d8e42025b9a service nova] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] No waiting events found dispatching network-vif-plugged-0964ccc2-743e-4ab2-bbee-76f6b55f151e {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 933.567305] env[69992]: WARNING nova.compute.manager [req-82b778a2-b19a-4f5f-9773-7b8b87256e50 req-08e4ba4c-16af-4446-b963-5d8e42025b9a service nova] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Received unexpected event network-vif-plugged-0964ccc2-743e-4ab2-bbee-76f6b55f151e for instance with vm_state building and task_state spawning. [ 933.567467] env[69992]: DEBUG nova.compute.manager [req-82b778a2-b19a-4f5f-9773-7b8b87256e50 req-08e4ba4c-16af-4446-b963-5d8e42025b9a service nova] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Received event network-changed-0964ccc2-743e-4ab2-bbee-76f6b55f151e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 933.567619] env[69992]: DEBUG nova.compute.manager [req-82b778a2-b19a-4f5f-9773-7b8b87256e50 req-08e4ba4c-16af-4446-b963-5d8e42025b9a service nova] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Refreshing instance network info cache due to event network-changed-0964ccc2-743e-4ab2-bbee-76f6b55f151e. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 933.567825] env[69992]: DEBUG oslo_concurrency.lockutils [req-82b778a2-b19a-4f5f-9773-7b8b87256e50 req-08e4ba4c-16af-4446-b963-5d8e42025b9a service nova] Acquiring lock "refresh_cache-7fbab19d-5a0a-4da3-b078-40ca0eaf8c97" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.567965] env[69992]: DEBUG oslo_concurrency.lockutils [req-82b778a2-b19a-4f5f-9773-7b8b87256e50 req-08e4ba4c-16af-4446-b963-5d8e42025b9a service nova] Acquired lock "refresh_cache-7fbab19d-5a0a-4da3-b078-40ca0eaf8c97" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.568546] env[69992]: DEBUG nova.network.neutron [req-82b778a2-b19a-4f5f-9773-7b8b87256e50 req-08e4ba4c-16af-4446-b963-5d8e42025b9a service nova] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Refreshing network info cache for port 0964ccc2-743e-4ab2-bbee-76f6b55f151e {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 933.581741] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "00b2fd0b-7841-448d-82cf-436aa8d80cda" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.581983] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "00b2fd0b-7841-448d-82cf-436aa8d80cda" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.582204] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "00b2fd0b-7841-448d-82cf-436aa8d80cda-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.582389] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "00b2fd0b-7841-448d-82cf-436aa8d80cda-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.582565] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "00b2fd0b-7841-448d-82cf-436aa8d80cda-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.586279] env[69992]: INFO nova.compute.manager [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d 
tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Terminating instance [ 933.626920] env[69992]: DEBUG nova.network.neutron [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Successfully updated port: 1d0c36a5-d435-4818-94a5-17eca575ea26 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 933.663411] env[69992]: DEBUG oslo_concurrency.lockutils [req-8b5ab814-b01d-4aa7-996a-026a17467d89 req-67e9915b-e0c7-4682-a9b7-b5aaab0ce5db service nova] Releasing lock "refresh_cache-bf75484e-4020-48f7-9419-bd88d0462b90" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 933.713377] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd46fc1-a77a-4126-bfa5-499553754001 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.724534] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e90e95-2f03-4794-8e91-7b6ca20f2123 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.761164] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b62725b3-112b-434f-9cb5-1c1b97a52e04 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.772235] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-393fa6c4-d856-4139-bc6c-f763fa8ab804 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.787316] env[69992]: DEBUG nova.compute.provider_tree [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 933.898542] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52beafab-2a17-ba45-f2d1-b4f8620bffd4, 'name': SearchDatastore_Task, 'duration_secs': 0.049019} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.903996] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78e31450-2a16-4835-bb69-1f1ac00c46c2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.914839] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 933.914839] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]521edc0c-434a-88e3-a689-4ddc4cedf011" [ 933.914839] env[69992]: _type = "Task" [ 933.914839] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.928979] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521edc0c-434a-88e3-a689-4ddc4cedf011, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.996492] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896840, 'name': Rename_Task, 'duration_secs': 0.180765} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.996492] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 933.996492] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e1e1c5ba-1af8-416b-a66e-e673acc7f529 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.005024] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 934.005024] env[69992]: value = "task-2896841" [ 934.005024] env[69992]: _type = "Task" [ 934.005024] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.016196] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896841, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.043064] env[69992]: DEBUG oslo_vmware.api [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': task-2896839, 'name': PowerOnVM_Task, 'duration_secs': 0.846401} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.043641] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 934.043992] env[69992]: INFO nova.compute.manager [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Took 9.57 seconds to spawn the instance on the hypervisor. 
[ 934.044257] env[69992]: DEBUG nova.compute.manager [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 934.045484] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3613730-6493-4fc4-a09f-e313b6cde3bb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.098133] env[69992]: DEBUG nova.compute.manager [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 934.098394] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 934.099373] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f39ace0-59e5-4e1b-9e0e-a9c55a61ca4e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.119813] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 934.120638] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5be5239d-5995-43e5-9e67-668a8a0b9fcd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.129783] env[69992]: DEBUG oslo_vmware.api [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 934.129783] env[69992]: value = "task-2896842" [ 934.129783] env[69992]: _type = "Task" [ 934.129783] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.135796] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "refresh_cache-94a4a16e-926c-47ce-a5a7-0b216b7c5442" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.136088] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquired lock "refresh_cache-94a4a16e-926c-47ce-a5a7-0b216b7c5442" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 934.136258] env[69992]: DEBUG nova.network.neutron [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 934.144858] env[69992]: DEBUG oslo_vmware.api [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896842, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.290197] env[69992]: DEBUG nova.scheduler.client.report [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 934.429364] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521edc0c-434a-88e3-a689-4ddc4cedf011, 'name': SearchDatastore_Task, 'duration_secs': 0.013752} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.429722] env[69992]: DEBUG oslo_concurrency.lockutils [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.430091] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97/7fbab19d-5a0a-4da3-b078-40ca0eaf8c97.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 934.430821] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-34c62ce2-457b-417a-a7f9-ac6470872bc3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.445451] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 934.445451] env[69992]: value = "task-2896844" [ 934.445451] env[69992]: _type = "Task" [ 934.445451] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.458645] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896844, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.516973] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896841, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.550408] env[69992]: DEBUG nova.network.neutron [req-82b778a2-b19a-4f5f-9773-7b8b87256e50 req-08e4ba4c-16af-4446-b963-5d8e42025b9a service nova] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Updated VIF entry in instance network info cache for port 0964ccc2-743e-4ab2-bbee-76f6b55f151e. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 934.550408] env[69992]: DEBUG nova.network.neutron [req-82b778a2-b19a-4f5f-9773-7b8b87256e50 req-08e4ba4c-16af-4446-b963-5d8e42025b9a service nova] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Updating instance_info_cache with network_info: [{"id": "0964ccc2-743e-4ab2-bbee-76f6b55f151e", "address": "fa:16:3e:12:21:b9", "network": {"id": "8299bcb6-041b-4758-9ef5-52d0357be7aa", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-138938467-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "034ac686ad0d438cbe7e56c546f87505", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0964ccc2-74", "ovs_interfaceid": "0964ccc2-743e-4ab2-bbee-76f6b55f151e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.575035] env[69992]: INFO nova.compute.manager [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Took 41.25 seconds to build instance. [ 934.646320] env[69992]: DEBUG oslo_vmware.api [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896842, 'name': PowerOffVM_Task, 'duration_secs': 0.282401} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.649667] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 934.649667] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 934.649667] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a60fde9-5267-4b91-a95e-0ae69008b9d2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.720122] env[69992]: DEBUG nova.network.neutron [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 934.729209] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 934.730163] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 934.730811] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Deleting the datastore file [datastore1] 00b2fd0b-7841-448d-82cf-436aa8d80cda {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 934.731344] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe9f0089-d92f-40e0-aa65-a270d9d94f6d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.743757] env[69992]: DEBUG oslo_vmware.api [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 934.743757] env[69992]: value = "task-2896846" [ 934.743757] env[69992]: _type = "Task" [ 934.743757] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.758084] env[69992]: DEBUG oslo_vmware.api [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896846, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.796346] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.710s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.798310] env[69992]: DEBUG nova.compute.manager [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 934.801347] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.970s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.806234] env[69992]: INFO nova.compute.claims [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 934.961674] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896844, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.021655] env[69992]: DEBUG oslo_vmware.api [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896841, 'name': PowerOnVM_Task, 'duration_secs': 0.733705} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.022055] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 935.022316] env[69992]: INFO nova.compute.manager [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Took 8.77 seconds to spawn the instance on the hypervisor. 
[ 935.022487] env[69992]: DEBUG nova.compute.manager [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 935.023411] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a372f214-3724-4131-971b-34507411b143 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.036710] env[69992]: DEBUG nova.network.neutron [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Updating instance_info_cache with network_info: [{"id": "1d0c36a5-d435-4818-94a5-17eca575ea26", "address": "fa:16:3e:6c:24:7d", "network": {"id": "8299bcb6-041b-4758-9ef5-52d0357be7aa", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-138938467-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "034ac686ad0d438cbe7e56c546f87505", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d0c36a5-d4", "ovs_interfaceid": "1d0c36a5-d435-4818-94a5-17eca575ea26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.053692] env[69992]: DEBUG oslo_concurrency.lockutils [req-82b778a2-b19a-4f5f-9773-7b8b87256e50 req-08e4ba4c-16af-4446-b963-5d8e42025b9a service nova] Releasing lock "refresh_cache-7fbab19d-5a0a-4da3-b078-40ca0eaf8c97" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 935.078657] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d80ca475-0fc6-480d-ab16-a2cb175a2bd9 tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Lock "ab3df643-58db-45b7-a572-9c040135989d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.166s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.258084] env[69992]: DEBUG oslo_vmware.api [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2896846, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.335885} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.258901] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 935.259081] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 935.259310] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 935.259511] env[69992]: INFO nova.compute.manager [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Took 1.16 seconds to destroy the instance on the hypervisor. [ 935.260120] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 935.261120] env[69992]: DEBUG nova.compute.manager [-] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 935.261466] env[69992]: DEBUG nova.network.neutron [-] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 935.314822] env[69992]: DEBUG nova.compute.utils [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 935.317163] env[69992]: DEBUG nova.compute.manager [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 935.318961] env[69992]: DEBUG nova.network.neutron [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 935.400081] env[69992]: DEBUG nova.policy [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c76ec6aac19f4e74943b20dd5e3224a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '46a2d84b932449edb69966a5884990d9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 935.461975] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896844, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.657462} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.462259] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97/7fbab19d-5a0a-4da3-b078-40ca0eaf8c97.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 935.462478] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 935.462762] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e689672b-e866-4570-8b05-d506f55846c9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.471637] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 935.471637] env[69992]: value = "task-2896847" [ 935.471637] env[69992]: _type = "Task" [ 935.471637] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.483090] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896847, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.542994] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Releasing lock "refresh_cache-94a4a16e-926c-47ce-a5a7-0b216b7c5442" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 935.543350] env[69992]: DEBUG nova.compute.manager [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Instance network_info: |[{"id": "1d0c36a5-d435-4818-94a5-17eca575ea26", "address": "fa:16:3e:6c:24:7d", "network": {"id": "8299bcb6-041b-4758-9ef5-52d0357be7aa", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-138938467-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "034ac686ad0d438cbe7e56c546f87505", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d0c36a5-d4", "ovs_interfaceid": "1d0c36a5-d435-4818-94a5-17eca575ea26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 935.550447] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:24:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee20e439-fed9-490e-97dd-f3c886977ae1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1d0c36a5-d435-4818-94a5-17eca575ea26', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 935.563539] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 935.564854] env[69992]: INFO nova.compute.manager [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Took 40.74 seconds to build instance. 
[ 935.566411] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 935.566681] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8059a17b-3b04-4304-8662-27071fb5819f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.585844] env[69992]: DEBUG nova.compute.manager [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 935.597385] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 935.597385] env[69992]: value = "task-2896848" [ 935.597385] env[69992]: _type = "Task" [ 935.597385] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.614341] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896848, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.672752] env[69992]: DEBUG oslo_concurrency.lockutils [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "714fafbf-a765-4e2c-8633-997d8244483c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.673038] env[69992]: DEBUG oslo_concurrency.lockutils [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "714fafbf-a765-4e2c-8633-997d8244483c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.796631] env[69992]: DEBUG nova.network.neutron [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Successfully created port: 7f0ad230-48ec-4413-ac8e-78f4421e792d {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 935.821986] env[69992]: DEBUG nova.compute.manager [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 935.827973] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquiring lock "bf75484e-4020-48f7-9419-bd88d0462b90" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.828598] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "bf75484e-4020-48f7-9419-bd88d0462b90" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.828598] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquiring lock "bf75484e-4020-48f7-9419-bd88d0462b90-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.828753] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "bf75484e-4020-48f7-9419-bd88d0462b90-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.828839] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "bf75484e-4020-48f7-9419-bd88d0462b90-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.833478] env[69992]: INFO nova.compute.manager [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Terminating instance [ 935.992624] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896847, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.128556} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.996616] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 936.000520] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dfe8524-b80d-4742-871b-23cc2772f46b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.026411] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Reconfiguring VM instance instance-0000001b to attach disk [datastore2] 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97/7fbab19d-5a0a-4da3-b078-40ca0eaf8c97.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 936.030173] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1adea70d-655b-41fd-bc86-e20ae1036d57 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.061134] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 936.061134] env[69992]: value = "task-2896849" [ 936.061134] env[69992]: _type = "Task" [ 936.061134] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.076273] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896849, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.080882] env[69992]: DEBUG nova.compute.manager [req-ee0f4bd0-0fed-4967-b00a-4a6dc742e8cb req-45ca7e9e-3874-4391-ace7-65987627a66c service nova] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Received event network-vif-plugged-1d0c36a5-d435-4818-94a5-17eca575ea26 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 936.081108] env[69992]: DEBUG oslo_concurrency.lockutils [req-ee0f4bd0-0fed-4967-b00a-4a6dc742e8cb req-45ca7e9e-3874-4391-ace7-65987627a66c service nova] Acquiring lock "94a4a16e-926c-47ce-a5a7-0b216b7c5442-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.081314] env[69992]: DEBUG oslo_concurrency.lockutils [req-ee0f4bd0-0fed-4967-b00a-4a6dc742e8cb req-45ca7e9e-3874-4391-ace7-65987627a66c service nova] Lock "94a4a16e-926c-47ce-a5a7-0b216b7c5442-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.081516] env[69992]: DEBUG oslo_concurrency.lockutils [req-ee0f4bd0-0fed-4967-b00a-4a6dc742e8cb req-45ca7e9e-3874-4391-ace7-65987627a66c service nova] Lock "94a4a16e-926c-47ce-a5a7-0b216b7c5442-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.081685] env[69992]: DEBUG nova.compute.manager [req-ee0f4bd0-0fed-4967-b00a-4a6dc742e8cb req-45ca7e9e-3874-4391-ace7-65987627a66c service nova] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] No waiting events found dispatching network-vif-plugged-1d0c36a5-d435-4818-94a5-17eca575ea26 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 936.081843] env[69992]: WARNING nova.compute.manager [req-ee0f4bd0-0fed-4967-b00a-4a6dc742e8cb req-45ca7e9e-3874-4391-ace7-65987627a66c service nova] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Received unexpected event network-vif-plugged-1d0c36a5-d435-4818-94a5-17eca575ea26 for instance with vm_state building and task_state spawning. [ 936.081997] env[69992]: DEBUG nova.compute.manager [req-ee0f4bd0-0fed-4967-b00a-4a6dc742e8cb req-45ca7e9e-3874-4391-ace7-65987627a66c service nova] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Received event network-changed-59bdab57-cc8d-40d6-90b5-e7582a1f1500 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 936.082159] env[69992]: DEBUG nova.compute.manager [req-ee0f4bd0-0fed-4967-b00a-4a6dc742e8cb req-45ca7e9e-3874-4391-ace7-65987627a66c service nova] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Refreshing instance network info cache due to event network-changed-59bdab57-cc8d-40d6-90b5-e7582a1f1500. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 936.082345] env[69992]: DEBUG oslo_concurrency.lockutils [req-ee0f4bd0-0fed-4967-b00a-4a6dc742e8cb req-45ca7e9e-3874-4391-ace7-65987627a66c service nova] Acquiring lock "refresh_cache-bf75484e-4020-48f7-9419-bd88d0462b90" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.082493] env[69992]: DEBUG oslo_concurrency.lockutils [req-ee0f4bd0-0fed-4967-b00a-4a6dc742e8cb req-45ca7e9e-3874-4391-ace7-65987627a66c service nova] Acquired lock "refresh_cache-bf75484e-4020-48f7-9419-bd88d0462b90" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 936.082620] env[69992]: DEBUG nova.network.neutron [req-ee0f4bd0-0fed-4967-b00a-4a6dc742e8cb req-45ca7e9e-3874-4391-ace7-65987627a66c service nova] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Refreshing network info cache for port 59bdab57-cc8d-40d6-90b5-e7582a1f1500 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 936.086611] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2b0747b8-ac0f-47ec-8ab1-bc9b5cd1974c tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "98cd0eb8-d17a-4a9b-a172-1ba1207168d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.841s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.117659] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896848, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.119030] env[69992]: DEBUG oslo_concurrency.lockutils [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.210803] env[69992]: DEBUG nova.network.neutron [-] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.237204] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Acquiring lock "1f9d0558-63fb-4a6f-a2d2-dd7a334249a2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.237204] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Lock "1f9d0558-63fb-4a6f-a2d2-dd7a334249a2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.237204] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 
tempest-TenantUsagesTestJSON-1178551802-project-member] Acquiring lock "1f9d0558-63fb-4a6f-a2d2-dd7a334249a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.237204] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Lock "1f9d0558-63fb-4a6f-a2d2-dd7a334249a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.237391] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Lock "1f9d0558-63fb-4a6f-a2d2-dd7a334249a2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.238752] env[69992]: INFO nova.compute.manager [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Terminating instance [ 936.310732] env[69992]: DEBUG oslo_concurrency.lockutils [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Acquiring lock "ab3df643-58db-45b7-a572-9c040135989d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.311009] env[69992]: DEBUG oslo_concurrency.lockutils [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Lock "ab3df643-58db-45b7-a572-9c040135989d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.311224] env[69992]: DEBUG oslo_concurrency.lockutils [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Acquiring lock "ab3df643-58db-45b7-a572-9c040135989d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.311406] env[69992]: DEBUG oslo_concurrency.lockutils [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Lock "ab3df643-58db-45b7-a572-9c040135989d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.311631] env[69992]: DEBUG oslo_concurrency.lockutils [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Lock 
"ab3df643-58db-45b7-a572-9c040135989d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.316453] env[69992]: INFO nova.compute.manager [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Terminating instance [ 936.337783] env[69992]: DEBUG nova.compute.manager [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 936.340056] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 936.341013] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac9f2343-e82a-4583-9a80-099d564ff33f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.353118] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 936.353430] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b44ae3da-677c-4a4c-a388-c2ce1fc227b4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.362724] env[69992]: DEBUG oslo_vmware.api [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for the task: (returnval){ [ 936.362724] env[69992]: value = "task-2896851" [ 936.362724] env[69992]: _type = "Task" [ 936.362724] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.381835] env[69992]: DEBUG oslo_vmware.api [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896851, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.574024] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896849, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.577360] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d26dd16-d260-4290-9b07-63c937b338a6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.588739] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f89310c-e5a4-40c2-b15c-4690e6918deb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.593544] env[69992]: DEBUG nova.compute.manager [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 936.640651] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abc5e8ac-53ec-4f72-b59e-6e0f863c150c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.649615] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896848, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.654641] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e26034-9ce1-442b-9b50-ca24e5cce150 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.676101] env[69992]: DEBUG nova.compute.provider_tree [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 936.713625] env[69992]: INFO nova.compute.manager [-] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Took 1.45 seconds to deallocate network for instance. [ 936.743197] env[69992]: DEBUG nova.compute.manager [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 936.743327] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 936.746078] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9154baf-f59a-440f-b397-68d6cd721b3a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.757414] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 936.758150] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-922dcdd1-850a-4aec-9a03-ec9219f6cd7c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.767538] env[69992]: DEBUG oslo_vmware.api [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Waiting for the task: (returnval){ [ 936.767538] env[69992]: value = "task-2896852" [ 936.767538] env[69992]: _type = "Task" [ 936.767538] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.784511] env[69992]: DEBUG oslo_vmware.api [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Task: {'id': task-2896852, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.820987] env[69992]: DEBUG nova.compute.manager [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 936.821249] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 936.822219] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642ade94-7247-4b8c-8e0c-5cda3594fab0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.834524] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 936.834875] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-67b67770-e2ae-454a-89df-a8e34f570b17 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.837741] env[69992]: DEBUG nova.compute.manager [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 936.853181] env[69992]: DEBUG oslo_vmware.api [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Waiting for the task: (returnval){ [ 936.853181] env[69992]: value = "task-2896853" [ 936.853181] env[69992]: _type = "Task" [ 936.853181] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.875027] env[69992]: DEBUG oslo_vmware.api [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': task-2896853, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.883234] env[69992]: DEBUG oslo_vmware.api [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896851, 'name': PowerOffVM_Task, 'duration_secs': 0.38688} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.885912] env[69992]: DEBUG nova.virt.hardware [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 936.886162] env[69992]: DEBUG nova.virt.hardware [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 936.886362] env[69992]: DEBUG nova.virt.hardware [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 936.886602] env[69992]: DEBUG nova.virt.hardware [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 936.886762] env[69992]: DEBUG nova.virt.hardware [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 936.886918] env[69992]: DEBUG nova.virt.hardware [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 936.887135] env[69992]: DEBUG nova.virt.hardware [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 936.887291] env[69992]: DEBUG nova.virt.hardware [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
936.887468] env[69992]: DEBUG nova.virt.hardware [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 936.887634] env[69992]: DEBUG nova.virt.hardware [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 936.887803] env[69992]: DEBUG nova.virt.hardware [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 936.888143] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 936.888303] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 936.889315] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e72bb4f-92ae-45a2-bdb3-296ed5ada7d8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.892129] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ebd2940-b45c-43a9-956e-864aa981aba6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.901249] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302e3774-e3b5-42da-9583-e87e80314e79 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.987837] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 936.988425] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 936.988675] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 
tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Deleting the datastore file [datastore2] bf75484e-4020-48f7-9419-bd88d0462b90 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 936.989107] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c075311-3d80-497b-b6b6-a086d7d01ca9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.999101] env[69992]: DEBUG oslo_vmware.api [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for the task: (returnval){ [ 936.999101] env[69992]: value = "task-2896855" [ 936.999101] env[69992]: _type = "Task" [ 936.999101] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.009694] env[69992]: DEBUG oslo_vmware.api [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896855, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.075340] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896849, 'name': ReconfigVM_Task, 'duration_secs': 0.611078} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.075769] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Reconfigured VM instance instance-0000001b to attach disk [datastore2] 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97/7fbab19d-5a0a-4da3-b078-40ca0eaf8c97.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 937.076835] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3ac1521c-b3b4-444f-ab81-921f5e37e006 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.086878] env[69992]: DEBUG nova.network.neutron [req-ee0f4bd0-0fed-4967-b00a-4a6dc742e8cb req-45ca7e9e-3874-4391-ace7-65987627a66c service nova] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Updated VIF entry in instance network info cache for port 59bdab57-cc8d-40d6-90b5-e7582a1f1500. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 937.087618] env[69992]: DEBUG nova.network.neutron [req-ee0f4bd0-0fed-4967-b00a-4a6dc742e8cb req-45ca7e9e-3874-4391-ace7-65987627a66c service nova] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Updating instance_info_cache with network_info: [{"id": "59bdab57-cc8d-40d6-90b5-e7582a1f1500", "address": "fa:16:3e:e6:8b:79", "network": {"id": "3e77044c-b2d9-4469-8bae-4dbd1f752c9c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-482235377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da546e986828460e958e2eed165bf47e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59bdab57-cc", "ovs_interfaceid": "59bdab57-cc8d-40d6-90b5-e7582a1f1500", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.090741] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 937.090741] env[69992]: value = "task-2896856" [ 937.090741] env[69992]: _type = "Task" [ 937.090741] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.115500] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896856, 'name': Rename_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.127205] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896848, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.137391] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.183550] env[69992]: DEBUG nova.scheduler.client.report [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 937.220884] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.281117] env[69992]: DEBUG oslo_vmware.api [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Task: {'id': task-2896852, 'name': PowerOffVM_Task, 'duration_secs': 0.191741} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.281515] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 937.282027] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 937.282027] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-23bbcfb8-bfdb-4e83-aab5-6c0ddb686103 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.367989] env[69992]: DEBUG oslo_vmware.api [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': task-2896853, 'name': PowerOffVM_Task, 'duration_secs': 0.465213} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.370281] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 937.374041] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 937.374041] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 937.374041] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 937.374041] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Deleting the datastore file [datastore1] 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 937.374041] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aabc83d1-f550-460b-be66-1fdbc4bb3fb0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.374337] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fa62db0c-81c1-43ba-b1cd-682f76ba12fb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.381681] env[69992]: DEBUG oslo_vmware.api [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Waiting for the task: (returnval){ [ 937.381681] env[69992]: value = "task-2896859" [ 937.381681] env[69992]: _type = "Task" [ 937.381681] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.391315] env[69992]: DEBUG oslo_vmware.api [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Task: {'id': task-2896859, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.443179] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 937.443415] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 937.443610] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Deleting the datastore file [datastore1] ab3df643-58db-45b7-a572-9c040135989d {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 937.443891] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-20410154-b1c1-4c78-bc59-94e9ee195185 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.451600] env[69992]: DEBUG oslo_vmware.api [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Waiting for the task: (returnval){ [ 937.451600] env[69992]: value = "task-2896860" [ 937.451600] env[69992]: _type = "Task" [ 937.451600] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.464165] env[69992]: DEBUG oslo_vmware.api [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': task-2896860, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.510327] env[69992]: DEBUG oslo_vmware.api [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896855, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24258} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.510770] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 937.510850] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 937.511427] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 937.511427] env[69992]: INFO nova.compute.manager [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Took 1.17 seconds to destroy the instance on the hypervisor. [ 937.511584] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 937.511805] env[69992]: DEBUG nova.compute.manager [-] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 937.512207] env[69992]: DEBUG nova.network.neutron [-] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 937.593042] env[69992]: DEBUG oslo_concurrency.lockutils [req-ee0f4bd0-0fed-4967-b00a-4a6dc742e8cb req-45ca7e9e-3874-4391-ace7-65987627a66c service nova] Releasing lock "refresh_cache-bf75484e-4020-48f7-9419-bd88d0462b90" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 937.593325] env[69992]: DEBUG nova.compute.manager [req-ee0f4bd0-0fed-4967-b00a-4a6dc742e8cb req-45ca7e9e-3874-4391-ace7-65987627a66c service nova] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Received event network-changed-1d0c36a5-d435-4818-94a5-17eca575ea26 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 937.593498] env[69992]: DEBUG nova.compute.manager [req-ee0f4bd0-0fed-4967-b00a-4a6dc742e8cb req-45ca7e9e-3874-4391-ace7-65987627a66c service nova] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Refreshing instance network info cache due to event network-changed-1d0c36a5-d435-4818-94a5-17eca575ea26. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 937.593776] env[69992]: DEBUG oslo_concurrency.lockutils [req-ee0f4bd0-0fed-4967-b00a-4a6dc742e8cb req-45ca7e9e-3874-4391-ace7-65987627a66c service nova] Acquiring lock "refresh_cache-94a4a16e-926c-47ce-a5a7-0b216b7c5442" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.593945] env[69992]: DEBUG oslo_concurrency.lockutils [req-ee0f4bd0-0fed-4967-b00a-4a6dc742e8cb req-45ca7e9e-3874-4391-ace7-65987627a66c service nova] Acquired lock "refresh_cache-94a4a16e-926c-47ce-a5a7-0b216b7c5442" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 937.594147] env[69992]: DEBUG nova.network.neutron [req-ee0f4bd0-0fed-4967-b00a-4a6dc742e8cb req-45ca7e9e-3874-4391-ace7-65987627a66c service nova] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Refreshing network info cache for port 1d0c36a5-d435-4818-94a5-17eca575ea26 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 937.608244] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896856, 'name': Rename_Task, 'duration_secs': 0.196455} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.608727] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 937.616156] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58dd05e5-d359-44f3-adea-65dfb3d3b1d2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.626852] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896848, 'name': CreateVM_Task, 'duration_secs': 1.600991} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.629189] env[69992]: DEBUG nova.network.neutron [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Successfully updated port: 7f0ad230-48ec-4413-ac8e-78f4421e792d {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 937.630533] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 937.630908] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 937.630908] env[69992]: value = "task-2896861" [ 937.630908] env[69992]: _type = "Task" [ 937.630908] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.632069] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.632239] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 937.632567] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 937.633073] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77452deb-067a-4678-9a46-92978be277a8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.642938] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 937.642938] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52926ee6-ec6d-6c23-7a69-10fba3b04218" [ 937.642938] env[69992]: _type = "Task" [ 937.642938] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.646919] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896861, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.657722] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52926ee6-ec6d-6c23-7a69-10fba3b04218, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.689532] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.888s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.690344] env[69992]: DEBUG nova.compute.manager [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 937.694541] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.156s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.696213] env[69992]: INFO nova.compute.claims [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 937.896469] env[69992]: DEBUG oslo_vmware.api [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Task: {'id': task-2896859, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.240892} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.896761] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 937.896949] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 937.897476] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 937.897476] env[69992]: INFO nova.compute.manager [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Took 1.15 seconds to destroy the instance on the hypervisor. 
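The bf75484e-4020-48f7-9419-bd88d0462b90, 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2 and ab3df643-58db-45b7-a572-9c040135989d entries above all trace the same teardown sequence: power off the VM, unregister it from vCenter, delete its directory from the datastore, then deallocate its Neutron ports. A condensed sketch of that flow follows; the helper names and call shapes only mirror the pattern visible in the log (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, deallocate_for_instance), not Nova's actual vmops/ds_util signatures.

def destroy_instance(session, vm_ref, ds_path, neutron, instance_uuid):
    # Illustrative only: condensed power-off -> unregister -> delete-contents
    # -> deallocate-network sequence, assuming an oslo.vmware-style session
    # object with invoke_api()/wait_for_task() and a Neutron client.
    vim = session.vim

    # 1. Power off the VM (the VirtualMachine.PowerOffVM_Task entries).
    session.wait_for_task(session.invoke_api(vim, "PowerOffVM_Task", vm_ref))

    # 2. Unregister the VM from vCenter (the VirtualMachine.UnregisterVM entries).
    session.invoke_api(vim, "UnregisterVM", vm_ref)

    # 3. Delete the instance directory, e.g. "[datastore1] <instance uuid>"
    #    (the FileManager.DeleteDatastoreFile_Task entries). The real code also
    #    passes a datacenter reference; omitted here for brevity.
    file_manager = vim.service_content.fileManager
    session.wait_for_task(
        session.invoke_api(vim, "DeleteDatastoreFile_Task",
                           file_manager, name=ds_path))

    # 4. Deallocate the instance's ports so Neutron frees the VIFs
    #    (the "Deallocating network for instance" / deallocate_for_instance() entries).
    for port in neutron.list_ports(device_id=instance_uuid)["ports"]:
        neutron.delete_port(port["id"])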
[ 937.897618] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 937.897857] env[69992]: DEBUG nova.compute.manager [-] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 937.897916] env[69992]: DEBUG nova.network.neutron [-] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 937.939576] env[69992]: DEBUG nova.compute.manager [req-b84a538c-2a79-491f-b508-dc2c8e6caa72 req-ce53491e-288f-497e-bc36-b7830307083d service nova] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Received event network-vif-plugged-7f0ad230-48ec-4413-ac8e-78f4421e792d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 937.939836] env[69992]: DEBUG oslo_concurrency.lockutils [req-b84a538c-2a79-491f-b508-dc2c8e6caa72 req-ce53491e-288f-497e-bc36-b7830307083d service nova] Acquiring lock "c205f559-7fe6-4d7e-beba-2fc96b89d705-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.945772] env[69992]: DEBUG oslo_concurrency.lockutils [req-b84a538c-2a79-491f-b508-dc2c8e6caa72 req-ce53491e-288f-497e-bc36-b7830307083d service nova] Lock "c205f559-7fe6-4d7e-beba-2fc96b89d705-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.945772] env[69992]: DEBUG oslo_concurrency.lockutils [req-b84a538c-2a79-491f-b508-dc2c8e6caa72 req-ce53491e-288f-497e-bc36-b7830307083d service nova] Lock "c205f559-7fe6-4d7e-beba-2fc96b89d705-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.005s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.945772] env[69992]: DEBUG nova.compute.manager [req-b84a538c-2a79-491f-b508-dc2c8e6caa72 req-ce53491e-288f-497e-bc36-b7830307083d service nova] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] No waiting events found dispatching network-vif-plugged-7f0ad230-48ec-4413-ac8e-78f4421e792d {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 937.945772] env[69992]: WARNING nova.compute.manager [req-b84a538c-2a79-491f-b508-dc2c8e6caa72 req-ce53491e-288f-497e-bc36-b7830307083d service nova] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Received unexpected event network-vif-plugged-7f0ad230-48ec-4413-ac8e-78f4421e792d for instance with vm_state building and task_state spawning. [ 937.970486] env[69992]: DEBUG oslo_vmware.api [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Task: {'id': task-2896860, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.249024} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.971035] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 937.971649] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 937.972646] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 937.972989] env[69992]: INFO nova.compute.manager [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] [instance: ab3df643-58db-45b7-a572-9c040135989d] Took 1.15 seconds to destroy the instance on the hypervisor. [ 937.973287] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 937.973497] env[69992]: DEBUG nova.compute.manager [-] [instance: ab3df643-58db-45b7-a572-9c040135989d] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 937.973588] env[69992]: DEBUG nova.network.neutron [-] [instance: ab3df643-58db-45b7-a572-9c040135989d] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 938.132615] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Acquiring lock "refresh_cache-c205f559-7fe6-4d7e-beba-2fc96b89d705" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.132894] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Acquired lock "refresh_cache-c205f559-7fe6-4d7e-beba-2fc96b89d705" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 938.133113] env[69992]: DEBUG nova.network.neutron [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 938.148323] env[69992]: DEBUG oslo_vmware.api [None 
req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896861, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.163183] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52926ee6-ec6d-6c23-7a69-10fba3b04218, 'name': SearchDatastore_Task, 'duration_secs': 0.026173} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.163628] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 938.163978] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 938.164348] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.164613] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 938.164984] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 938.165362] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a10b565b-805d-4f07-9397-afda4ab12933 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.177852] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 938.178340] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None 
req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 938.179636] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-040ef5a9-ec16-4e62-9bcf-403cf5bf382c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.190627] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 938.190627] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]521ed27d-df44-d350-63ae-b39bbcb46b7e" [ 938.190627] env[69992]: _type = "Task" [ 938.190627] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.207710] env[69992]: DEBUG nova.compute.utils [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 938.211876] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521ed27d-df44-d350-63ae-b39bbcb46b7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.217631] env[69992]: DEBUG nova.compute.manager [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 938.218462] env[69992]: DEBUG nova.network.neutron [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 938.341518] env[69992]: DEBUG nova.policy [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd2db25ef40744d5908197233a0c0f1c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8217315011854468b0cc17c4dfe342f9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 938.468011] env[69992]: DEBUG nova.network.neutron [req-ee0f4bd0-0fed-4967-b00a-4a6dc742e8cb req-45ca7e9e-3874-4391-ace7-65987627a66c service nova] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Updated VIF entry in instance network info cache for port 1d0c36a5-d435-4818-94a5-17eca575ea26. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 938.468547] env[69992]: DEBUG nova.network.neutron [req-ee0f4bd0-0fed-4967-b00a-4a6dc742e8cb req-45ca7e9e-3874-4391-ace7-65987627a66c service nova] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Updating instance_info_cache with network_info: [{"id": "1d0c36a5-d435-4818-94a5-17eca575ea26", "address": "fa:16:3e:6c:24:7d", "network": {"id": "8299bcb6-041b-4758-9ef5-52d0357be7aa", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-138938467-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "034ac686ad0d438cbe7e56c546f87505", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d0c36a5-d4", "ovs_interfaceid": "1d0c36a5-d435-4818-94a5-17eca575ea26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.503953] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquiring lock "b7a1b9e1-4d57-435f-bdb6-51481968aacb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.504268] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Lock "b7a1b9e1-4d57-435f-bdb6-51481968aacb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.537756] env[69992]: DEBUG nova.network.neutron [-] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.647621] env[69992]: DEBUG oslo_vmware.api [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896861, 'name': PowerOnVM_Task, 'duration_secs': 0.687339} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.648249] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 938.648682] env[69992]: INFO nova.compute.manager [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Took 9.56 seconds to spawn the instance on the hypervisor. [ 938.648682] env[69992]: DEBUG nova.compute.manager [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 938.649993] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52382a72-0932-4776-a095-acd697717e92 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.702021] env[69992]: DEBUG nova.network.neutron [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 938.711179] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521ed27d-df44-d350-63ae-b39bbcb46b7e, 'name': SearchDatastore_Task, 'duration_secs': 0.016223} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.713602] env[69992]: DEBUG nova.compute.manager [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 938.717069] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b155808-bc20-40a4-baec-57d02c0e1ce5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.733342] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 938.733342] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a56d85-0d2a-7b85-d748-e2ae02a5fc08" [ 938.733342] env[69992]: _type = "Task" [ 938.733342] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.750644] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a56d85-0d2a-7b85-d748-e2ae02a5fc08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.974050] env[69992]: DEBUG oslo_concurrency.lockutils [req-ee0f4bd0-0fed-4967-b00a-4a6dc742e8cb req-45ca7e9e-3874-4391-ace7-65987627a66c service nova] Releasing lock "refresh_cache-94a4a16e-926c-47ce-a5a7-0b216b7c5442" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 939.041696] env[69992]: INFO nova.compute.manager [-] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Took 1.53 seconds to deallocate network for instance. [ 939.174706] env[69992]: DEBUG nova.network.neutron [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Successfully created port: ae0113e0-6fd4-44a9-b496-7e09ffb4539b {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 939.179510] env[69992]: INFO nova.compute.manager [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Took 39.63 seconds to build instance. [ 939.255600] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a56d85-0d2a-7b85-d748-e2ae02a5fc08, 'name': SearchDatastore_Task, 'duration_secs': 0.017612} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.264668] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 939.264668] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 94a4a16e-926c-47ce-a5a7-0b216b7c5442/94a4a16e-926c-47ce-a5a7-0b216b7c5442.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 939.266393] env[69992]: DEBUG nova.compute.manager [req-91de759e-1415-40b3-8872-8d44d6255394 req-50023bec-f7a0-4a81-9640-9279a99cf712 service nova] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Received event network-vif-deleted-369bb007-f861-4b94-a5ac-dd9d835b0fb2 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 939.266760] env[69992]: DEBUG nova.compute.manager [req-91de759e-1415-40b3-8872-8d44d6255394 req-50023bec-f7a0-4a81-9640-9279a99cf712 service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Received event network-changed-1f86db68-8a81-421c-aa9b-4daab0584c4c {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 939.267739] env[69992]: DEBUG nova.compute.manager [req-91de759e-1415-40b3-8872-8d44d6255394 req-50023bec-f7a0-4a81-9640-9279a99cf712 service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Refreshing instance network info cache due to event network-changed-1f86db68-8a81-421c-aa9b-4daab0584c4c. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 939.267739] env[69992]: DEBUG oslo_concurrency.lockutils [req-91de759e-1415-40b3-8872-8d44d6255394 req-50023bec-f7a0-4a81-9640-9279a99cf712 service nova] Acquiring lock "refresh_cache-ee4c0f2b-44cb-4b37-8e4a-5706b9932144" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.268083] env[69992]: DEBUG oslo_concurrency.lockutils [req-91de759e-1415-40b3-8872-8d44d6255394 req-50023bec-f7a0-4a81-9640-9279a99cf712 service nova] Acquired lock "refresh_cache-ee4c0f2b-44cb-4b37-8e4a-5706b9932144" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.268376] env[69992]: DEBUG nova.network.neutron [req-91de759e-1415-40b3-8872-8d44d6255394 req-50023bec-f7a0-4a81-9640-9279a99cf712 service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Refreshing network info cache for port 1f86db68-8a81-421c-aa9b-4daab0584c4c {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 939.274410] env[69992]: DEBUG nova.network.neutron [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Updating instance_info_cache with network_info: [{"id": "7f0ad230-48ec-4413-ac8e-78f4421e792d", "address": "fa:16:3e:22:d1:a6", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.194", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f0ad230-48", "ovs_interfaceid": "7f0ad230-48ec-4413-ac8e-78f4421e792d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.274410] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45e17266-045f-40ca-944c-583cfcc4729e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.296099] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 939.296099] env[69992]: value = "task-2896862" [ 939.296099] env[69992]: _type = "Task" [ 939.296099] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.315653] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896862, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.320684] env[69992]: DEBUG nova.network.neutron [-] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.425319] env[69992]: DEBUG nova.network.neutron [-] [instance: ab3df643-58db-45b7-a572-9c040135989d] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.469244] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75280e88-9cf6-4990-9490-6de6e1bf59d2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.479352] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b246241e-f51b-4d99-beca-3185f9d16d5e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.512341] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd588f9f-871f-4547-ace4-59ed1a67c341 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.521688] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380a2443-08cc-4515-8673-5f18e17245f6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.536903] env[69992]: DEBUG nova.compute.provider_tree [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 939.554184] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.682623] env[69992]: DEBUG oslo_concurrency.lockutils [None req-17f0820d-7d6d-417d-9156-ce53a46201cf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "7fbab19d-5a0a-4da3-b078-40ca0eaf8c97" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.710s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.733026] env[69992]: DEBUG nova.compute.manager [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 
tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 939.768156] env[69992]: DEBUG nova.virt.hardware [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 939.768156] env[69992]: DEBUG nova.virt.hardware [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 939.768821] env[69992]: DEBUG nova.virt.hardware [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 939.769201] env[69992]: DEBUG nova.virt.hardware [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 939.769461] env[69992]: DEBUG nova.virt.hardware [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 939.769706] env[69992]: DEBUG nova.virt.hardware [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 939.770048] env[69992]: DEBUG nova.virt.hardware [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 939.770310] env[69992]: DEBUG nova.virt.hardware [None req-9076bac8-508f-4dca-bd69-460a012ea573 
tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 939.770575] env[69992]: DEBUG nova.virt.hardware [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 939.770830] env[69992]: DEBUG nova.virt.hardware [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 939.771174] env[69992]: DEBUG nova.virt.hardware [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 939.772759] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aea8f34-a694-4d65-be1b-98bd213efe2e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.787144] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Releasing lock "refresh_cache-c205f559-7fe6-4d7e-beba-2fc96b89d705" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 939.787479] env[69992]: DEBUG nova.compute.manager [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Instance network_info: |[{"id": "7f0ad230-48ec-4413-ac8e-78f4421e792d", "address": "fa:16:3e:22:d1:a6", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.194", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f0ad230-48", "ovs_interfaceid": "7f0ad230-48ec-4413-ac8e-78f4421e792d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 939.787995] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None 
req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:d1:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '07e9bef1-2b0e-4e4d-997f-de71bb0e213a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f0ad230-48ec-4413-ac8e-78f4421e792d', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 939.795571] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 939.796843] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633e7713-4404-40f0-a8e5-ad5fe11ca328 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.801367] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 939.801981] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1cf8bcca-fb43-4897-b698-eb2c27ca7f10 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.828101] env[69992]: INFO nova.compute.manager [-] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Took 1.93 seconds to deallocate network for instance. [ 939.846805] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 939.846805] env[69992]: value = "task-2896863" [ 939.846805] env[69992]: _type = "Task" [ 939.846805] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.847574] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896862, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.860245] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896863, 'name': CreateVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.930681] env[69992]: INFO nova.compute.manager [-] [instance: ab3df643-58db-45b7-a572-9c040135989d] Took 1.96 seconds to deallocate network for instance. 
[ 940.040448] env[69992]: DEBUG nova.scheduler.client.report [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 940.186263] env[69992]: DEBUG nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 940.206205] env[69992]: DEBUG nova.network.neutron [req-91de759e-1415-40b3-8872-8d44d6255394 req-50023bec-f7a0-4a81-9640-9279a99cf712 service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Updated VIF entry in instance network info cache for port 1f86db68-8a81-421c-aa9b-4daab0584c4c. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 940.209290] env[69992]: DEBUG nova.network.neutron [req-91de759e-1415-40b3-8872-8d44d6255394 req-50023bec-f7a0-4a81-9640-9279a99cf712 service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Updating instance_info_cache with network_info: [{"id": "1f86db68-8a81-421c-aa9b-4daab0584c4c", "address": "fa:16:3e:1d:47:c9", "network": {"id": "3e77044c-b2d9-4469-8bae-4dbd1f752c9c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-482235377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da546e986828460e958e2eed165bf47e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f86db68-8a", "ovs_interfaceid": "1f86db68-8a81-421c-aa9b-4daab0584c4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.313770] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896862, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.578725} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.314078] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 94a4a16e-926c-47ce-a5a7-0b216b7c5442/94a4a16e-926c-47ce-a5a7-0b216b7c5442.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 940.314300] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 940.314622] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-38c0981c-ffb1-4e81-8794-e2ebbae5139a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.325093] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 940.325093] env[69992]: value = "task-2896864" [ 940.325093] env[69992]: _type = "Task" [ 940.325093] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.340127] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896864, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.349771] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.361819] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896863, 'name': CreateVM_Task, 'duration_secs': 0.489979} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.361956] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 940.362902] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.362902] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.363235] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 940.363544] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9dee8c99-eba9-42b2-8290-bf6a1c8887f5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.369548] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for the task: (returnval){ [ 940.369548] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52960597-cdd6-d11f-1678-96e2f4a8d8ae" [ 940.369548] env[69992]: _type = "Task" [ 940.369548] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.379974] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52960597-cdd6-d11f-1678-96e2f4a8d8ae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.441784] env[69992]: DEBUG oslo_concurrency.lockutils [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.549165] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.855s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.549771] env[69992]: DEBUG nova.compute.manager [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 940.552894] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 24.255s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.553111] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.553300] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69992) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 940.553598] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49fddcf4-6d67-4994-b419-45fb00d63eb9 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 17.994s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.560101] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-204408b9-ccc8-4f79-8491-c5ade0db00cd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.572065] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43459313-c10a-4cbb-abb9-f17209bfdcc4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.597238] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c96fdb-7623-4bde-89da-9ec88ed7576c {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.605880] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f58db0-d9d1-43e8-8a68-df9452dd6837 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.641192] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180024MB free_disk=161GB free_vcpus=48 pci_devices=None {{(pid=69992) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 940.641467] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.710471] env[69992]: DEBUG oslo_concurrency.lockutils [req-91de759e-1415-40b3-8872-8d44d6255394 req-50023bec-f7a0-4a81-9640-9279a99cf712 service nova] Releasing lock "refresh_cache-ee4c0f2b-44cb-4b37-8e4a-5706b9932144" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 940.710886] env[69992]: DEBUG nova.compute.manager [req-91de759e-1415-40b3-8872-8d44d6255394 req-50023bec-f7a0-4a81-9640-9279a99cf712 service nova] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Received event network-vif-deleted-59bdab57-cc8d-40d6-90b5-e7582a1f1500 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 940.713341] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.839172] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896864, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105793} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.839491] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 940.840417] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1db253-2f61-44ea-8ee3-5d856b3314d6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.866660] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Reconfiguring VM instance instance-0000001c to attach disk [datastore2] 94a4a16e-926c-47ce-a5a7-0b216b7c5442/94a4a16e-926c-47ce-a5a7-0b216b7c5442.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 940.866660] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6466cf9-02f8-4cce-bd95-96ff27b3e7d0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.901025] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52960597-cdd6-d11f-1678-96e2f4a8d8ae, 'name': SearchDatastore_Task, 'duration_secs': 0.014186} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.901025] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 940.901025] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 940.901025] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.901571] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.901571] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 940.901571] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 940.901571] env[69992]: value = "task-2896865" [ 940.901571] env[69992]: _type = "Task" [ 940.901571] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.901571] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ab59564f-3115-40fd-8134-4fb17618ed07 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.915697] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896865, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.918245] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 940.918484] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 940.921066] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-858ced00-952f-4845-b1a1-94fe55fef8d2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.929113] env[69992]: DEBUG nova.compute.manager [req-a2db815d-2b1c-4208-8d61-f8ab9e9112b7 req-9b466d25-b6d2-4bd6-8146-ca82074acccb service nova] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Received event network-changed-7f0ad230-48ec-4413-ac8e-78f4421e792d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 940.929632] env[69992]: DEBUG nova.compute.manager [req-a2db815d-2b1c-4208-8d61-f8ab9e9112b7 req-9b466d25-b6d2-4bd6-8146-ca82074acccb service nova] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Refreshing instance network info cache due to event network-changed-7f0ad230-48ec-4413-ac8e-78f4421e792d. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 940.929704] env[69992]: DEBUG oslo_concurrency.lockutils [req-a2db815d-2b1c-4208-8d61-f8ab9e9112b7 req-9b466d25-b6d2-4bd6-8146-ca82074acccb service nova] Acquiring lock "refresh_cache-c205f559-7fe6-4d7e-beba-2fc96b89d705" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.930151] env[69992]: DEBUG oslo_concurrency.lockutils [req-a2db815d-2b1c-4208-8d61-f8ab9e9112b7 req-9b466d25-b6d2-4bd6-8146-ca82074acccb service nova] Acquired lock "refresh_cache-c205f559-7fe6-4d7e-beba-2fc96b89d705" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.930151] env[69992]: DEBUG nova.network.neutron [req-a2db815d-2b1c-4208-8d61-f8ab9e9112b7 req-9b466d25-b6d2-4bd6-8146-ca82074acccb service nova] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Refreshing network info cache for port 7f0ad230-48ec-4413-ac8e-78f4421e792d {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 940.940747] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for the task: (returnval){ [ 940.940747] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]523ddfdd-866c-985c-cdaa-23d4bb06a97f" [ 940.940747] env[69992]: _type = "Task" [ 940.940747] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.959506] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a6a43447-1eaf-4461-a987-1b83fc045d89 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquiring lock "bf75484e-4020-48f7-9419-bd88d0462b90" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.960561] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523ddfdd-866c-985c-cdaa-23d4bb06a97f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.060961] env[69992]: DEBUG nova.compute.utils [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 941.063460] env[69992]: DEBUG nova.compute.manager [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 941.065336] env[69992]: DEBUG nova.network.neutron [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 941.163354] env[69992]: DEBUG nova.policy [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd43d38f16c04db1ba46ae836cbbd971', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f3a2959667e41f1b5868994454b21be', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 941.380080] env[69992]: DEBUG nova.network.neutron [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Successfully updated port: ae0113e0-6fd4-44a9-b496-7e09ffb4539b {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 941.420172] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896865, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.456660] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523ddfdd-866c-985c-cdaa-23d4bb06a97f, 'name': SearchDatastore_Task, 'duration_secs': 0.017314} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.457585] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43401e26-6f65-434e-bb0d-c6ad078bf835 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.466171] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for the task: (returnval){ [ 941.466171] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5225c300-c2c7-a72e-11f8-31ef9ad28e45" [ 941.466171] env[69992]: _type = "Task" [ 941.466171] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.472515] env[69992]: DEBUG oslo_concurrency.lockutils [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Acquiring lock "af07ebd0-5f12-49c3-a518-95be9a8d6c82" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 941.472684] env[69992]: DEBUG oslo_concurrency.lockutils [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Lock "af07ebd0-5f12-49c3-a518-95be9a8d6c82" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 941.485351] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5225c300-c2c7-a72e-11f8-31ef9ad28e45, 'name': SearchDatastore_Task, 'duration_secs': 0.01597} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.486503] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 941.487000] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] c205f559-7fe6-4d7e-beba-2fc96b89d705/c205f559-7fe6-4d7e-beba-2fc96b89d705.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 941.487221] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d34a813-ea37-4bf3-9d8a-89a8a95444bb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.499318] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for the task: (returnval){ [ 941.499318] env[69992]: value = "task-2896866" [ 941.499318] env[69992]: _type = "Task" [ 941.499318] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.509637] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896866, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.570025] env[69992]: DEBUG nova.compute.manager [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 941.585980] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Acquiring lock "b3d62400-e639-4c49-9207-64fd1e684f99" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 941.586175] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Lock "b3d62400-e639-4c49-9207-64fd1e684f99" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 941.769961] env[69992]: DEBUG oslo_vmware.rw_handles [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521390bd-7c0d-fc08-8838-76749e2ae773/disk-0.vmdk. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 941.771180] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef373d84-f215-41fd-8676-57235744abb0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.778922] env[69992]: DEBUG oslo_vmware.rw_handles [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521390bd-7c0d-fc08-8838-76749e2ae773/disk-0.vmdk is in state: ready. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 941.779126] env[69992]: ERROR oslo_vmware.rw_handles [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521390bd-7c0d-fc08-8838-76749e2ae773/disk-0.vmdk due to incomplete transfer. [ 941.779367] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f9f9fcf3-50a4-4231-a3a3-621b9a552693 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.788228] env[69992]: DEBUG oslo_vmware.rw_handles [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521390bd-7c0d-fc08-8838-76749e2ae773/disk-0.vmdk. 
{{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 941.788426] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Uploaded image 97dcfec5-7f0a-4cf9-a9e8-12828f18d9c7 to the Glance image server {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 941.790602] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Destroying the VM {{(pid=69992) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 941.790908] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-80c2fe19-0ff1-4a8e-87aa-632587d24884 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.793355] env[69992]: DEBUG nova.network.neutron [req-a2db815d-2b1c-4208-8d61-f8ab9e9112b7 req-9b466d25-b6d2-4bd6-8146-ca82074acccb service nova] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Updated VIF entry in instance network info cache for port 7f0ad230-48ec-4413-ac8e-78f4421e792d. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 941.793729] env[69992]: DEBUG nova.network.neutron [req-a2db815d-2b1c-4208-8d61-f8ab9e9112b7 req-9b466d25-b6d2-4bd6-8146-ca82074acccb service nova] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Updating instance_info_cache with network_info: [{"id": "7f0ad230-48ec-4413-ac8e-78f4421e792d", "address": "fa:16:3e:22:d1:a6", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.194", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f0ad230-48", "ovs_interfaceid": "7f0ad230-48ec-4413-ac8e-78f4421e792d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.803711] env[69992]: DEBUG oslo_vmware.api [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 941.803711] env[69992]: value = "task-2896867" [ 941.803711] env[69992]: _type = "Task" [ 941.803711] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.817542] env[69992]: DEBUG oslo_vmware.api [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896867, 'name': Destroy_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.821052] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97988b4a-add0-429d-ab5a-e47f60e68910 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.830275] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f368f35-c2db-4640-aacd-08cc9566e6cf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.872712] env[69992]: DEBUG nova.network.neutron [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Successfully created port: d325d681-8643-43a2-93dd-d4687ad115f5 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 941.872712] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87582959-8379-4608-b672-35ad4edfacca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.880823] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c9c5588-ba68-4eb4-a9f3-ff424c810978 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.885343] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "refresh_cache-eec50935-f553-43c7-b67b-7289299745bd" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.885501] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquired lock "refresh_cache-eec50935-f553-43c7-b67b-7289299745bd" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 941.885650] env[69992]: DEBUG nova.network.neutron [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 941.899437] env[69992]: DEBUG nova.compute.provider_tree [None req-49fddcf4-6d67-4994-b419-45fb00d63eb9 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.915449] env[69992]: DEBUG oslo_vmware.api [None 
req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896865, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.012836] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896866, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.028794] env[69992]: DEBUG nova.compute.manager [req-cd4fa5da-c0cb-4b49-ad7c-1bbd54110e59 req-22ec27e7-68c8-4837-80c0-f732bc857e7c service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Received event network-changed-1f86db68-8a81-421c-aa9b-4daab0584c4c {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 942.028896] env[69992]: DEBUG nova.compute.manager [req-cd4fa5da-c0cb-4b49-ad7c-1bbd54110e59 req-22ec27e7-68c8-4837-80c0-f732bc857e7c service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Refreshing instance network info cache due to event network-changed-1f86db68-8a81-421c-aa9b-4daab0584c4c. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 942.029097] env[69992]: DEBUG oslo_concurrency.lockutils [req-cd4fa5da-c0cb-4b49-ad7c-1bbd54110e59 req-22ec27e7-68c8-4837-80c0-f732bc857e7c service nova] Acquiring lock "refresh_cache-ee4c0f2b-44cb-4b37-8e4a-5706b9932144" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.029246] env[69992]: DEBUG oslo_concurrency.lockutils [req-cd4fa5da-c0cb-4b49-ad7c-1bbd54110e59 req-22ec27e7-68c8-4837-80c0-f732bc857e7c service nova] Acquired lock "refresh_cache-ee4c0f2b-44cb-4b37-8e4a-5706b9932144" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.029412] env[69992]: DEBUG nova.network.neutron [req-cd4fa5da-c0cb-4b49-ad7c-1bbd54110e59 req-22ec27e7-68c8-4837-80c0-f732bc857e7c service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Refreshing network info cache for port 1f86db68-8a81-421c-aa9b-4daab0584c4c {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 942.297308] env[69992]: DEBUG oslo_concurrency.lockutils [req-a2db815d-2b1c-4208-8d61-f8ab9e9112b7 req-9b466d25-b6d2-4bd6-8146-ca82074acccb service nova] Releasing lock "refresh_cache-c205f559-7fe6-4d7e-beba-2fc96b89d705" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 942.297522] env[69992]: DEBUG nova.compute.manager [req-a2db815d-2b1c-4208-8d61-f8ab9e9112b7 req-9b466d25-b6d2-4bd6-8146-ca82074acccb service nova] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Received event network-vif-deleted-a83e70ea-6bd6-4317-a2fd-5170e25fba56 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 942.297714] env[69992]: DEBUG nova.compute.manager [req-a2db815d-2b1c-4208-8d61-f8ab9e9112b7 req-9b466d25-b6d2-4bd6-8146-ca82074acccb service nova] [instance: ab3df643-58db-45b7-a572-9c040135989d] Received event network-vif-deleted-13280c7f-380d-4f20-b42f-532775cfc598 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 942.317318] env[69992]: DEBUG oslo_vmware.api [None 
req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896867, 'name': Destroy_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.404607] env[69992]: DEBUG nova.scheduler.client.report [None req-49fddcf4-6d67-4994-b419-45fb00d63eb9 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 942.420119] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896865, 'name': ReconfigVM_Task, 'duration_secs': 1.45201} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.420567] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Reconfigured VM instance instance-0000001c to attach disk [datastore2] 94a4a16e-926c-47ce-a5a7-0b216b7c5442/94a4a16e-926c-47ce-a5a7-0b216b7c5442.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 942.421499] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f0bc93bc-5ff8-47ba-954a-74c105225ee3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.426778] env[69992]: DEBUG nova.network.neutron [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 942.432711] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 942.432711] env[69992]: value = "task-2896868" [ 942.432711] env[69992]: _type = "Task" [ 942.432711] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.445603] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896868, 'name': Rename_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.512997] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896866, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.586532] env[69992]: DEBUG nova.compute.manager [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 942.616483] env[69992]: DEBUG nova.virt.hardware [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 942.616814] env[69992]: DEBUG nova.virt.hardware [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 942.617166] env[69992]: DEBUG nova.virt.hardware [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 942.618342] env[69992]: DEBUG nova.virt.hardware [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 942.618342] env[69992]: DEBUG nova.virt.hardware [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 942.618342] env[69992]: DEBUG nova.virt.hardware [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 942.618342] env[69992]: DEBUG nova.virt.hardware [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 942.618342] env[69992]: DEBUG nova.virt.hardware [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 942.618557] env[69992]: DEBUG nova.virt.hardware [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 942.618803] env[69992]: DEBUG nova.virt.hardware [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 942.619010] env[69992]: DEBUG nova.virt.hardware [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 942.620451] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0761837d-2834-4a90-a8f3-75a0edf2a1f5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.634160] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f48998-8a53-43e5-af16-ed805246b141 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.760672] env[69992]: DEBUG nova.network.neutron [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Updating instance_info_cache with network_info: [{"id": "ae0113e0-6fd4-44a9-b496-7e09ffb4539b", "address": "fa:16:3e:4e:44:12", "network": {"id": "e710c9b1-06e9-45f1-88fe-251001c4f54f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1116812669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8217315011854468b0cc17c4dfe342f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": 
"nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae0113e0-6f", "ovs_interfaceid": "ae0113e0-6fd4-44a9-b496-7e09ffb4539b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.820949] env[69992]: DEBUG oslo_vmware.api [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896867, 'name': Destroy_Task} progress is 33%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.946994] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896868, 'name': Rename_Task, 'duration_secs': 0.43818} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.947362] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 942.947563] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-88efa20e-f9ad-4d55-9c06-44c12af2ecd8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.955549] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 942.955549] env[69992]: value = "task-2896869" [ 942.955549] env[69992]: _type = "Task" [ 942.955549] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.964134] env[69992]: DEBUG nova.network.neutron [req-cd4fa5da-c0cb-4b49-ad7c-1bbd54110e59 req-22ec27e7-68c8-4837-80c0-f732bc857e7c service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Updated VIF entry in instance network info cache for port 1f86db68-8a81-421c-aa9b-4daab0584c4c. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 942.964134] env[69992]: DEBUG nova.network.neutron [req-cd4fa5da-c0cb-4b49-ad7c-1bbd54110e59 req-22ec27e7-68c8-4837-80c0-f732bc857e7c service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Updating instance_info_cache with network_info: [{"id": "1f86db68-8a81-421c-aa9b-4daab0584c4c", "address": "fa:16:3e:1d:47:c9", "network": {"id": "3e77044c-b2d9-4469-8bae-4dbd1f752c9c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-482235377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da546e986828460e958e2eed165bf47e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f86db68-8a", "ovs_interfaceid": "1f86db68-8a81-421c-aa9b-4daab0584c4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.967111] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896869, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.018270] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896866, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.277742} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.018270] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] c205f559-7fe6-4d7e-beba-2fc96b89d705/c205f559-7fe6-4d7e-beba-2fc96b89d705.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 943.018492] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 943.020036] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2b5f70f2-849e-4d4a-9edf-46b0a9263b3b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.029604] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for the task: (returnval){ [ 943.029604] env[69992]: value = "task-2896870" [ 943.029604] env[69992]: _type = "Task" [ 943.029604] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.043396] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896870, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.261647] env[69992]: DEBUG nova.compute.manager [req-023ce0c9-4677-41d2-9fa1-daf6541e9609 req-68027138-f160-49de-8bbb-5f1b3c90102f service nova] [instance: eec50935-f553-43c7-b67b-7289299745bd] Received event network-vif-plugged-ae0113e0-6fd4-44a9-b496-7e09ffb4539b {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 943.261883] env[69992]: DEBUG oslo_concurrency.lockutils [req-023ce0c9-4677-41d2-9fa1-daf6541e9609 req-68027138-f160-49de-8bbb-5f1b3c90102f service nova] Acquiring lock "eec50935-f553-43c7-b67b-7289299745bd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.262113] env[69992]: DEBUG oslo_concurrency.lockutils [req-023ce0c9-4677-41d2-9fa1-daf6541e9609 req-68027138-f160-49de-8bbb-5f1b3c90102f service nova] Lock "eec50935-f553-43c7-b67b-7289299745bd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.262287] env[69992]: DEBUG oslo_concurrency.lockutils [req-023ce0c9-4677-41d2-9fa1-daf6541e9609 req-68027138-f160-49de-8bbb-5f1b3c90102f service nova] Lock "eec50935-f553-43c7-b67b-7289299745bd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.262455] env[69992]: DEBUG nova.compute.manager [req-023ce0c9-4677-41d2-9fa1-daf6541e9609 req-68027138-f160-49de-8bbb-5f1b3c90102f service nova] [instance: eec50935-f553-43c7-b67b-7289299745bd] No waiting events found dispatching network-vif-plugged-ae0113e0-6fd4-44a9-b496-7e09ffb4539b {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 943.262618] env[69992]: WARNING nova.compute.manager [req-023ce0c9-4677-41d2-9fa1-daf6541e9609 req-68027138-f160-49de-8bbb-5f1b3c90102f service nova] [instance: eec50935-f553-43c7-b67b-7289299745bd] Received unexpected event network-vif-plugged-ae0113e0-6fd4-44a9-b496-7e09ffb4539b for instance with vm_state building and task_state spawning. [ 943.262774] env[69992]: DEBUG nova.compute.manager [req-023ce0c9-4677-41d2-9fa1-daf6541e9609 req-68027138-f160-49de-8bbb-5f1b3c90102f service nova] [instance: eec50935-f553-43c7-b67b-7289299745bd] Received event network-changed-ae0113e0-6fd4-44a9-b496-7e09ffb4539b {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 943.262926] env[69992]: DEBUG nova.compute.manager [req-023ce0c9-4677-41d2-9fa1-daf6541e9609 req-68027138-f160-49de-8bbb-5f1b3c90102f service nova] [instance: eec50935-f553-43c7-b67b-7289299745bd] Refreshing instance network info cache due to event network-changed-ae0113e0-6fd4-44a9-b496-7e09ffb4539b. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 943.263102] env[69992]: DEBUG oslo_concurrency.lockutils [req-023ce0c9-4677-41d2-9fa1-daf6541e9609 req-68027138-f160-49de-8bbb-5f1b3c90102f service nova] Acquiring lock "refresh_cache-eec50935-f553-43c7-b67b-7289299745bd" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.263804] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Releasing lock "refresh_cache-eec50935-f553-43c7-b67b-7289299745bd" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 943.265214] env[69992]: DEBUG nova.compute.manager [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Instance network_info: |[{"id": "ae0113e0-6fd4-44a9-b496-7e09ffb4539b", "address": "fa:16:3e:4e:44:12", "network": {"id": "e710c9b1-06e9-45f1-88fe-251001c4f54f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1116812669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8217315011854468b0cc17c4dfe342f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae0113e0-6f", "ovs_interfaceid": "ae0113e0-6fd4-44a9-b496-7e09ffb4539b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 943.265509] env[69992]: DEBUG oslo_concurrency.lockutils [req-023ce0c9-4677-41d2-9fa1-daf6541e9609 req-68027138-f160-49de-8bbb-5f1b3c90102f service nova] Acquired lock "refresh_cache-eec50935-f553-43c7-b67b-7289299745bd" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 943.265691] env[69992]: DEBUG nova.network.neutron [req-023ce0c9-4677-41d2-9fa1-daf6541e9609 req-68027138-f160-49de-8bbb-5f1b3c90102f service nova] [instance: eec50935-f553-43c7-b67b-7289299745bd] Refreshing network info cache for port ae0113e0-6fd4-44a9-b496-7e09ffb4539b {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 943.266935] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:44:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e1049e8-c06b-4c93-a9e1-2cbb530f3f95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': 
True}, 'iface_id': 'ae0113e0-6fd4-44a9-b496-7e09ffb4539b', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 943.279450] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Creating folder: Project (8217315011854468b0cc17c4dfe342f9). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 943.284092] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-460a2685-fe00-4553-be7e-51626028e9f3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.298125] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Created folder: Project (8217315011854468b0cc17c4dfe342f9) in parent group-v581821. [ 943.298333] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Creating folder: Instances. Parent ref: group-v581914. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 943.298576] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-635476a1-e56a-4fe9-919a-8d84819e86d1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.310832] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Created folder: Instances in parent group-v581914. [ 943.311094] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 943.317543] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eec50935-f553-43c7-b67b-7289299745bd] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 943.318293] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-667a7470-db7c-45b1-b943-32cd0026bdbf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.345254] env[69992]: DEBUG oslo_vmware.api [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896867, 'name': Destroy_Task, 'duration_secs': 1.124595} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.345872] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Destroyed the VM [ 943.346158] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Deleting Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 943.346405] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 943.346405] env[69992]: value = "task-2896873" [ 943.346405] env[69992]: _type = "Task" [ 943.346405] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.347451] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2ca536f0-15c4-401c-a47c-758694722d8a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.361024] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896873, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.361024] env[69992]: DEBUG oslo_vmware.api [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 943.361024] env[69992]: value = "task-2896874" [ 943.361024] env[69992]: _type = "Task" [ 943.361024] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.369287] env[69992]: DEBUG oslo_vmware.api [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896874, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.420730] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49fddcf4-6d67-4994-b419-45fb00d63eb9 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.867s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.424271] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.794s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.426892] env[69992]: INFO nova.compute.claims [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 943.468710] env[69992]: DEBUG oslo_concurrency.lockutils [req-cd4fa5da-c0cb-4b49-ad7c-1bbd54110e59 req-22ec27e7-68c8-4837-80c0-f732bc857e7c service nova] Releasing lock "refresh_cache-ee4c0f2b-44cb-4b37-8e4a-5706b9932144" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 943.474488] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896869, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.543235] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896870, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073949} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.544132] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 943.547031] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d856c54-6be4-4427-a97b-01f385309813 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.579268] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Reconfiguring VM instance instance-0000001d to attach disk [datastore2] c205f559-7fe6-4d7e-beba-2fc96b89d705/c205f559-7fe6-4d7e-beba-2fc96b89d705.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 943.580112] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e32fab2-cd3e-4102-b58b-bfe58cfcc72a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.606019] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for the task: (returnval){ [ 943.606019] env[69992]: value = "task-2896875" [ 943.606019] env[69992]: _type = "Task" [ 943.606019] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.614409] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896875, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.862420] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896873, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.873681] env[69992]: DEBUG oslo_vmware.api [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896874, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.901715] env[69992]: DEBUG nova.network.neutron [req-023ce0c9-4677-41d2-9fa1-daf6541e9609 req-68027138-f160-49de-8bbb-5f1b3c90102f service nova] [instance: eec50935-f553-43c7-b67b-7289299745bd] Updated VIF entry in instance network info cache for port ae0113e0-6fd4-44a9-b496-7e09ffb4539b. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 943.902281] env[69992]: DEBUG nova.network.neutron [req-023ce0c9-4677-41d2-9fa1-daf6541e9609 req-68027138-f160-49de-8bbb-5f1b3c90102f service nova] [instance: eec50935-f553-43c7-b67b-7289299745bd] Updating instance_info_cache with network_info: [{"id": "ae0113e0-6fd4-44a9-b496-7e09ffb4539b", "address": "fa:16:3e:4e:44:12", "network": {"id": "e710c9b1-06e9-45f1-88fe-251001c4f54f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1116812669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8217315011854468b0cc17c4dfe342f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae0113e0-6f", "ovs_interfaceid": "ae0113e0-6fd4-44a9-b496-7e09ffb4539b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.972719] env[69992]: DEBUG oslo_vmware.api [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896869, 'name': PowerOnVM_Task, 'duration_secs': 0.74518} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.973551] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 943.973818] env[69992]: INFO nova.compute.manager [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Took 12.13 seconds to spawn the instance on the hypervisor. 
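(Editor's note, not part of the log.) The repeated "Waiting for the task … progress is N% … completed successfully" entries above are produced by oslo.vmware's task polling. The sketch below is only a rough illustration of that call pattern, not Nova's code: the vCenter host, credentials, and managed-object ID are placeholders, and PowerOnVM_Task is just one example of a task-returning call like those in the log.

    # Illustrative sketch of the oslo.vmware session + task-polling pattern.
    # Host, credentials and the VM moref below are placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc1.example.test',                # placeholder vCenter host
        'administrator@vsphere.local',     # placeholder user
        'secret',                          # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)            # how often _poll_task logs progress

    # Invoke an API that returns a Task managed object, then block on it;
    # wait_for_task() polls the task and logs the progress lines seen above.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # placeholder
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)   # raises if the task errors out
    print(task_info.state)                     # 'success' once complete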
[ 943.974041] env[69992]: DEBUG nova.compute.manager [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 943.975356] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-907662ef-0d17-4028-95c7-58e73208e14d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.012874] env[69992]: INFO nova.scheduler.client.report [None req-49fddcf4-6d67-4994-b419-45fb00d63eb9 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Deleted allocation for migration 085fa7b6-8373-47ff-9061-cee118d126e6 [ 944.115117] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896875, 'name': ReconfigVM_Task, 'duration_secs': 0.346492} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.115434] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Reconfigured VM instance instance-0000001d to attach disk [datastore2] c205f559-7fe6-4d7e-beba-2fc96b89d705/c205f559-7fe6-4d7e-beba-2fc96b89d705.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 944.116059] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c086ee5f-0f4f-4164-baac-6d1397252ca4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.125594] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for the task: (returnval){ [ 944.125594] env[69992]: value = "task-2896876" [ 944.125594] env[69992]: _type = "Task" [ 944.125594] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.137495] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896876, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.363501] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896873, 'name': CreateVM_Task, 'duration_secs': 0.636793} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.364410] env[69992]: DEBUG nova.network.neutron [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Successfully updated port: d325d681-8643-43a2-93dd-d4687ad115f5 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 944.372470] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eec50935-f553-43c7-b67b-7289299745bd] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 944.374032] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.374274] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 944.374521] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 944.375272] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55f981ca-0bdd-4a33-960b-a2cabd231135 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.382306] env[69992]: DEBUG oslo_vmware.api [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896874, 'name': RemoveSnapshot_Task, 'duration_secs': 0.846294} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.383739] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Deleted Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 944.384154] env[69992]: INFO nova.compute.manager [None req-82b6b682-565d-439b-a5a4-cee3a956b644 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Took 16.73 seconds to snapshot the instance on the hypervisor. 
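(Editor's note, not part of the log.) The "Acquiring lock … acquired … released" entries throughout this run come from oslo.concurrency's lockutils. A minimal sketch of that usage pattern follows, assuming nothing beyond what the log shows: the lock names are copied from the entries above, the bodies are placeholders, and this is not Nova's actual implementation.

    # Illustrative sketch of the oslo.concurrency locking pattern.
    from oslo_concurrency import lockutils

    cache_vmdk = ('[datastore2] devstack-image-cache_base/'
                  'eb50549f-9db8-4c15-a738-0e4b1e9e33fb')

    # Context-manager form: acquire/release are logged, as in the entries above.
    with lockutils.lock(cache_vmdk):
        pass  # e.g. check the cached VMDK and copy it if missing (placeholder)

    # Decorator form, analogous to the per-instance-UUID locks taken around
    # _locked_do_build_and_run_instance in the log.
    @lockutils.synchronized('b3d62400-e639-4c49-9207-64fd1e684f99')
    def build_instance():
        pass  # only one builder per instance UUID runs at a time (placeholder)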
[ 944.387619] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 944.387619] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52b7cef9-84e0-b399-a1a4-2730ad42184b" [ 944.387619] env[69992]: _type = "Task" [ 944.387619] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.397382] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b7cef9-84e0-b399-a1a4-2730ad42184b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.406106] env[69992]: DEBUG oslo_concurrency.lockutils [req-023ce0c9-4677-41d2-9fa1-daf6541e9609 req-68027138-f160-49de-8bbb-5f1b3c90102f service nova] Releasing lock "refresh_cache-eec50935-f553-43c7-b67b-7289299745bd" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 944.432948] env[69992]: DEBUG nova.compute.manager [req-3103d079-67c6-440b-8cf3-d767084b812a req-765c00ca-c0d4-4760-8a75-008c513ccb65 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Received event network-vif-plugged-d325d681-8643-43a2-93dd-d4687ad115f5 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 944.433188] env[69992]: DEBUG oslo_concurrency.lockutils [req-3103d079-67c6-440b-8cf3-d767084b812a req-765c00ca-c0d4-4760-8a75-008c513ccb65 service nova] Acquiring lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.433395] env[69992]: DEBUG oslo_concurrency.lockutils [req-3103d079-67c6-440b-8cf3-d767084b812a req-765c00ca-c0d4-4760-8a75-008c513ccb65 service nova] Lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.433564] env[69992]: DEBUG oslo_concurrency.lockutils [req-3103d079-67c6-440b-8cf3-d767084b812a req-765c00ca-c0d4-4760-8a75-008c513ccb65 service nova] Lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.433729] env[69992]: DEBUG nova.compute.manager [req-3103d079-67c6-440b-8cf3-d767084b812a req-765c00ca-c0d4-4760-8a75-008c513ccb65 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] No waiting events found dispatching network-vif-plugged-d325d681-8643-43a2-93dd-d4687ad115f5 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 944.433890] env[69992]: WARNING nova.compute.manager [req-3103d079-67c6-440b-8cf3-d767084b812a req-765c00ca-c0d4-4760-8a75-008c513ccb65 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Received unexpected event network-vif-plugged-d325d681-8643-43a2-93dd-d4687ad115f5 
for instance with vm_state building and task_state spawning. [ 944.501481] env[69992]: INFO nova.compute.manager [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Took 44.18 seconds to build instance. [ 944.519451] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49fddcf4-6d67-4994-b419-45fb00d63eb9 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "1d436762-964d-40d9-871e-ee33c3ba25b5" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 25.544s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.636381] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896876, 'name': Rename_Task, 'duration_secs': 0.169684} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.640442] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 944.640952] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b7e88f02-f284-4798-b0ad-3766777b6cf8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.656346] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for the task: (returnval){ [ 944.656346] env[69992]: value = "task-2896877" [ 944.656346] env[69992]: _type = "Task" [ 944.656346] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.667402] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896877, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.874891] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "refresh_cache-dd31269e-716c-44cd-9fc3-ce227fe5b3b2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.875227] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquired lock "refresh_cache-dd31269e-716c-44cd-9fc3-ce227fe5b3b2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 944.875262] env[69992]: DEBUG nova.network.neutron [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 944.909092] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b7cef9-84e0-b399-a1a4-2730ad42184b, 'name': SearchDatastore_Task, 'duration_secs': 0.040757} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.909626] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 944.909626] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 944.909937] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.910095] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 944.910269] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 944.910926] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7439416b-7223-4a95-8e1e-8270b5842954 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.927938] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 944.928152] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 944.928985] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d678443-cb5f-448d-b256-8f2d1d6e0490 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.942145] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 944.942145] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]529062c4-b3d7-760c-a935-8af3b04f207a" [ 944.942145] env[69992]: _type = "Task" [ 944.942145] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.951982] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]529062c4-b3d7-760c-a935-8af3b04f207a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.010684] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fcc6d6f8-c521-49dd-b2c3-8fdc473a5557 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "94a4a16e-926c-47ce-a5a7-0b216b7c5442" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.873s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.134512] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735469ff-b407-4d69-a035-9f8367f7518e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.145593] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ce4a39-d28d-4848-9050-b11e0f88abb5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.187400] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba562a8e-4f83-4c19-bca9-402106119222 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.198060] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896877, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.200022] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9839aaf-2764-41ba-8546-253448da147f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.215819] env[69992]: DEBUG nova.compute.provider_tree [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.426926] env[69992]: DEBUG nova.network.neutron [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 945.464667] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]529062c4-b3d7-760c-a935-8af3b04f207a, 'name': SearchDatastore_Task, 'duration_secs': 0.017072} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.465465] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-105a1ca7-f8f3-460c-9695-a0fda1e33eb3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.474215] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 945.474215] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52e5d0f4-00fc-7ffd-3f35-03cb0b07154c" [ 945.474215] env[69992]: _type = "Task" [ 945.474215] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.484350] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e5d0f4-00fc-7ffd-3f35-03cb0b07154c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.514672] env[69992]: DEBUG nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 945.694253] env[69992]: DEBUG oslo_vmware.api [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896877, 'name': PowerOnVM_Task, 'duration_secs': 0.732095} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.694817] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 945.695195] env[69992]: INFO nova.compute.manager [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Took 8.86 seconds to spawn the instance on the hypervisor. 
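The "Acquiring lock ... acquired ... released" bookkeeping that brackets the refresh_cache and image-cache operations in these entries comes from oslo.concurrency's lockutils wrappers. A short sketch of the two forms involved follows; the lock names are taken from the log, while the guarded bodies are placeholders.

from oslo_concurrency import lockutils

# Decorator form: every caller sharing the lock name is serialised, which is
# how the per-instance "refresh_cache-<uuid>" locks in the log behave.
@lockutils.synchronized('refresh_cache-dd31269e-716c-44cd-9fc3-ce227fe5b3b2')
def refresh_network_cache():
    pass  # placeholder: rebuild the instance network info cache here

# Context-manager form, matching the explicit Acquiring/Releasing pairs
# around the shared image-cache path on datastore2.
with lockutils.lock('[datastore2] devstack-image-cache_base'):
    pass  # placeholder: fetch or reuse the cached image while holding the lock

refresh_network_cache()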
[ 945.695548] env[69992]: DEBUG nova.compute.manager [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 945.696490] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c78a91a-7730-4691-aba0-0b448878ceac {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.721014] env[69992]: DEBUG nova.scheduler.client.report [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 945.809495] env[69992]: DEBUG nova.network.neutron [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Updating instance_info_cache with network_info: [{"id": "d325d681-8643-43a2-93dd-d4687ad115f5", "address": "fa:16:3e:19:cd:26", "network": {"id": "50514bed-dff8-45a5-83f3-ec0c1ece9611", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1240365716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f3a2959667e41f1b5868994454b21be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd325d681-86", "ovs_interfaceid": "d325d681-8643-43a2-93dd-d4687ad115f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.988285] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e5d0f4-00fc-7ffd-3f35-03cb0b07154c, 'name': SearchDatastore_Task, 'duration_secs': 0.029836} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.988878] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 945.989348] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] eec50935-f553-43c7-b67b-7289299745bd/eec50935-f553-43c7-b67b-7289299745bd.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 945.990094] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c1f4c202-946d-4b2c-b250-dbee82504fb8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.003055] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 946.003055] env[69992]: value = "task-2896878" [ 946.003055] env[69992]: _type = "Task" [ 946.003055] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.018563] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896878, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.046151] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 946.228083] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.803s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.228083] env[69992]: DEBUG nova.compute.manager [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 946.231556] env[69992]: INFO nova.compute.manager [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Took 41.04 seconds to build instance. [ 946.233037] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.632s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.233304] env[69992]: DEBUG nova.objects.instance [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lazy-loading 'resources' on Instance uuid 9bab6bf7-43c8-4cc3-b484-4472f1acdf45 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.313310] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Releasing lock "refresh_cache-dd31269e-716c-44cd-9fc3-ce227fe5b3b2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 946.313735] env[69992]: DEBUG nova.compute.manager [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Instance network_info: |[{"id": "d325d681-8643-43a2-93dd-d4687ad115f5", "address": "fa:16:3e:19:cd:26", "network": {"id": "50514bed-dff8-45a5-83f3-ec0c1ece9611", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1240365716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f3a2959667e41f1b5868994454b21be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd325d681-86", "ovs_interfaceid": "d325d681-8643-43a2-93dd-d4687ad115f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 946.314104] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:cd:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0954fad3-d24d-496c-83e6-a09d3cb556fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': 
True}, 'iface_id': 'd325d681-8643-43a2-93dd-d4687ad115f5', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 946.325895] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Creating folder: Project (3f3a2959667e41f1b5868994454b21be). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 946.329630] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cc308466-00e2-4159-84ef-c105b7a8fa72 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.345779] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Created folder: Project (3f3a2959667e41f1b5868994454b21be) in parent group-v581821. [ 946.346089] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Creating folder: Instances. Parent ref: group-v581917. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 946.346393] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fc8ea91a-ae30-42ea-8f08-082350783993 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.367178] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Created folder: Instances in parent group-v581917. [ 946.367511] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 946.367743] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 946.367987] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f46f536-7058-421c-8ca9-fc8ae8d75e3d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.397299] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 946.397299] env[69992]: value = "task-2896881" [ 946.397299] env[69992]: _type = "Task" [ 946.397299] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.407445] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896881, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.521286] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896878, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.694372] env[69992]: DEBUG oslo_concurrency.lockutils [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Acquiring lock "7fc7c481-75e8-40f2-a971-752ce6dde59b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 946.694838] env[69992]: DEBUG oslo_concurrency.lockutils [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Lock "7fc7c481-75e8-40f2-a971-752ce6dde59b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.736157] env[69992]: DEBUG nova.compute.utils [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 946.738144] env[69992]: DEBUG nova.compute.manager [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 946.738404] env[69992]: DEBUG nova.network.neutron [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 946.741921] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c2595d8-1310-4c46-b20d-4aa5d716c10d tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Lock "c205f559-7fe6-4d7e-beba-2fc96b89d705" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.851962] env[69992]: DEBUG nova.policy [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd2db25ef40744d5908197233a0c0f1c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8217315011854468b0cc17c4dfe342f9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 946.911159] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896881, 'name': CreateVM_Task, 'duration_secs': 0.471382} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.911349] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 946.912196] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.912401] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 946.912684] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 946.912939] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87eaf5d8-016d-4476-a7e6-a8eef255d87c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.922399] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 946.922399] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]524f3045-1a4b-b325-5cc4-e6eb4c107726" [ 946.922399] env[69992]: _type = "Task" [ 946.922399] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.932545] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524f3045-1a4b-b325-5cc4-e6eb4c107726, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.022067] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896878, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.605636} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.023620] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] eec50935-f553-43c7-b67b-7289299745bd/eec50935-f553-43c7-b67b-7289299745bd.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 947.023765] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 947.024421] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8f609044-508c-4bee-9d9f-73456ca71dcd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.033793] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 947.033793] env[69992]: value = "task-2896882" [ 947.033793] env[69992]: _type = "Task" [ 947.033793] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.047901] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896882, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.242572] env[69992]: DEBUG nova.compute.manager [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 947.248687] env[69992]: DEBUG nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 947.274087] env[69992]: DEBUG nova.compute.manager [req-e95662c5-34c7-4392-a3fd-ff72507c483b req-b3e59020-6931-49e8-9f67-b941ddb8b55d service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Received event network-changed-d325d681-8643-43a2-93dd-d4687ad115f5 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 947.274446] env[69992]: DEBUG nova.compute.manager [req-e95662c5-34c7-4392-a3fd-ff72507c483b req-b3e59020-6931-49e8-9f67-b941ddb8b55d service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Refreshing instance network info cache due to event network-changed-d325d681-8643-43a2-93dd-d4687ad115f5. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 947.274715] env[69992]: DEBUG oslo_concurrency.lockutils [req-e95662c5-34c7-4392-a3fd-ff72507c483b req-b3e59020-6931-49e8-9f67-b941ddb8b55d service nova] Acquiring lock "refresh_cache-dd31269e-716c-44cd-9fc3-ce227fe5b3b2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.274869] env[69992]: DEBUG oslo_concurrency.lockutils [req-e95662c5-34c7-4392-a3fd-ff72507c483b req-b3e59020-6931-49e8-9f67-b941ddb8b55d service nova] Acquired lock "refresh_cache-dd31269e-716c-44cd-9fc3-ce227fe5b3b2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 947.275084] env[69992]: DEBUG nova.network.neutron [req-e95662c5-34c7-4392-a3fd-ff72507c483b req-b3e59020-6931-49e8-9f67-b941ddb8b55d service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Refreshing network info cache for port d325d681-8643-43a2-93dd-d4687ad115f5 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 947.410673] env[69992]: DEBUG nova.network.neutron [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Successfully created port: a225b5fb-43a1-478e-bb4d-0436f27e0475 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 947.428858] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34708641-6c7b-4ac9-b934-aa9d2869a919 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.441095] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-497f2caf-6d95-4fc6-bcd2-e13948becef2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.444339] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524f3045-1a4b-b325-5cc4-e6eb4c107726, 'name': SearchDatastore_Task, 'duration_secs': 0.024326} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.444946] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 947.445207] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 947.445442] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.445585] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 947.445760] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 947.446514] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7fc11740-4336-4b89-abef-f4c02514c39d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.481723] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e39ffc2-8bb6-4729-baf2-1287c4cb304c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.486740] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 947.486740] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 947.486740] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67c5c21d-7330-4f2e-bf53-7a50de93b598 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.493831] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 947.493831] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5251ba4f-59f4-0350-26d6-34047e85ed86" [ 947.493831] env[69992]: _type = "Task" [ 947.493831] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.495021] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-093f248c-8b54-43d4-9b21-5af116238a70 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.507588] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5251ba4f-59f4-0350-26d6-34047e85ed86, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.515242] env[69992]: DEBUG nova.compute.provider_tree [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 947.545669] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896882, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07395} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.545926] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 947.546806] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca44206-8d09-4fca-8148-4b6e1cd693b8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.573813] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Reconfiguring VM instance instance-0000001e to attach disk [datastore2] eec50935-f553-43c7-b67b-7289299745bd/eec50935-f553-43c7-b67b-7289299745bd.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 947.574122] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43a5d702-9de4-44e1-b86c-353fa132f7bf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.595812] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 947.595812] env[69992]: value = "task-2896883" [ 947.595812] env[69992]: _type = "Task" [ 947.595812] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.605122] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896883, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.775162] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.987426] env[69992]: DEBUG nova.compute.manager [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 947.988413] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-616bf9e3-d3f3-46eb-a33c-eec4c6a0a539 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.007475] env[69992]: DEBUG nova.network.neutron [req-e95662c5-34c7-4392-a3fd-ff72507c483b req-b3e59020-6931-49e8-9f67-b941ddb8b55d service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Updated VIF entry in instance network info cache for port d325d681-8643-43a2-93dd-d4687ad115f5. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 948.007828] env[69992]: DEBUG nova.network.neutron [req-e95662c5-34c7-4392-a3fd-ff72507c483b req-b3e59020-6931-49e8-9f67-b941ddb8b55d service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Updating instance_info_cache with network_info: [{"id": "d325d681-8643-43a2-93dd-d4687ad115f5", "address": "fa:16:3e:19:cd:26", "network": {"id": "50514bed-dff8-45a5-83f3-ec0c1ece9611", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1240365716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f3a2959667e41f1b5868994454b21be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd325d681-86", "ovs_interfaceid": "d325d681-8643-43a2-93dd-d4687ad115f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.018298] env[69992]: DEBUG nova.scheduler.client.report [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 948.021640] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5251ba4f-59f4-0350-26d6-34047e85ed86, 'name': SearchDatastore_Task, 'duration_secs': 0.04771} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.023094] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92131e20-6d54-4ff0-a9d2-26fedaf10ea8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.031981] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 948.031981] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52ce1d9b-08ae-7dc4-9ecc-bb6f1d2195c6" [ 948.031981] env[69992]: _type = "Task" [ 948.031981] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.040723] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ce1d9b-08ae-7dc4-9ecc-bb6f1d2195c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.106824] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896883, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.261833] env[69992]: DEBUG nova.compute.manager [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 948.297623] env[69992]: DEBUG nova.virt.hardware [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 948.297914] env[69992]: DEBUG nova.virt.hardware [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 948.298115] env[69992]: DEBUG nova.virt.hardware [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 948.298366] env[69992]: DEBUG nova.virt.hardware [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 948.298525] env[69992]: DEBUG nova.virt.hardware [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 948.298702] env[69992]: DEBUG nova.virt.hardware [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 948.298977] env[69992]: DEBUG nova.virt.hardware [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 948.299272] env[69992]: DEBUG nova.virt.hardware [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 948.299496] env[69992]: DEBUG nova.virt.hardware [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 948.299693] env[69992]: DEBUG nova.virt.hardware [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 948.300562] env[69992]: DEBUG nova.virt.hardware [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 948.301480] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a15045-7bdc-4b44-aa1f-4a2216721064 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.310255] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b66a6be-3927-4578-9a55-a4ae6424b5b4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.500683] env[69992]: INFO nova.compute.manager [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] instance snapshotting [ 948.503586] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b30b9f8-2f37-4f33-8d2e-4f24cc635024 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.510520] env[69992]: DEBUG oslo_concurrency.lockutils [req-e95662c5-34c7-4392-a3fd-ff72507c483b req-b3e59020-6931-49e8-9f67-b941ddb8b55d service nova] Releasing lock "refresh_cache-dd31269e-716c-44cd-9fc3-ce227fe5b3b2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 948.524278] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.291s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.526689] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.355s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.528264] env[69992]: INFO nova.compute.claims [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: 
e0b5ad16-f631-444c-a189-167e34574316] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 948.531625] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab65e1c0-7a95-4ae8-a377-981abc985981 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.544119] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ce1d9b-08ae-7dc4-9ecc-bb6f1d2195c6, 'name': SearchDatastore_Task, 'duration_secs': 0.011271} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.545858] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 948.546181] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] dd31269e-716c-44cd-9fc3-ce227fe5b3b2/dd31269e-716c-44cd-9fc3-ce227fe5b3b2.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 948.548741] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-129ea0c2-d558-485c-ab37-08f89de22e25 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.555411] env[69992]: INFO nova.scheduler.client.report [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Deleted allocations for instance 9bab6bf7-43c8-4cc3-b484-4472f1acdf45 [ 948.557568] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 948.557568] env[69992]: value = "task-2896884" [ 948.557568] env[69992]: _type = "Task" [ 948.557568] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.570230] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2896884, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.606273] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896883, 'name': ReconfigVM_Task, 'duration_secs': 0.546198} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.606540] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Reconfigured VM instance instance-0000001e to attach disk [datastore2] eec50935-f553-43c7-b67b-7289299745bd/eec50935-f553-43c7-b67b-7289299745bd.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 948.607222] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-78a224f0-9aea-4db6-8be9-80582eeecdc4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.615446] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 948.615446] env[69992]: value = "task-2896885" [ 948.615446] env[69992]: _type = "Task" [ 948.615446] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.624238] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896885, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.742994] env[69992]: DEBUG oslo_concurrency.lockutils [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquiring lock "a06d4b38-0e39-46ef-a588-7627661cb201" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.744093] env[69992]: DEBUG oslo_concurrency.lockutils [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lock "a06d4b38-0e39-46ef-a588-7627661cb201" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.052328] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Creating Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 949.053218] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-119868f4-7461-42de-92f9-bfac758e8a9d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.067177] env[69992]: DEBUG oslo_vmware.api [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the 
task: (returnval){ [ 949.067177] env[69992]: value = "task-2896886" [ 949.067177] env[69992]: _type = "Task" [ 949.067177] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.071092] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82c8d85b-211a-41a0-a9fe-fb57798ad817 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "9bab6bf7-43c8-4cc3-b484-4472f1acdf45" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.475s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.074110] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2896884, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.084557] env[69992]: DEBUG oslo_vmware.api [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896886, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.130031] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896885, 'name': Rename_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.262880] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Acquiring lock "c205f559-7fe6-4d7e-beba-2fc96b89d705" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.263302] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Lock "c205f559-7fe6-4d7e-beba-2fc96b89d705" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.263547] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Acquiring lock "c205f559-7fe6-4d7e-beba-2fc96b89d705-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.263743] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Lock "c205f559-7fe6-4d7e-beba-2fc96b89d705-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.263915] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Lock "c205f559-7fe6-4d7e-beba-2fc96b89d705-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.269058] env[69992]: INFO nova.compute.manager [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Terminating instance [ 949.307522] env[69992]: DEBUG nova.network.neutron [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Successfully updated port: a225b5fb-43a1-478e-bb4d-0436f27e0475 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 949.472571] env[69992]: DEBUG nova.compute.manager [req-e234563b-a70f-4af9-8a6a-4db9ba9ba1ff req-ac585a69-06d0-494c-a875-71bc4b5b4127 service nova] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Received event network-vif-plugged-a225b5fb-43a1-478e-bb4d-0436f27e0475 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 949.472571] env[69992]: DEBUG oslo_concurrency.lockutils [req-e234563b-a70f-4af9-8a6a-4db9ba9ba1ff req-ac585a69-06d0-494c-a875-71bc4b5b4127 service nova] Acquiring lock "a8813822-f77b-4b73-a6dc-e0eab83b0402-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.472571] env[69992]: DEBUG oslo_concurrency.lockutils [req-e234563b-a70f-4af9-8a6a-4db9ba9ba1ff req-ac585a69-06d0-494c-a875-71bc4b5b4127 service nova] Lock "a8813822-f77b-4b73-a6dc-e0eab83b0402-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.472571] env[69992]: DEBUG oslo_concurrency.lockutils [req-e234563b-a70f-4af9-8a6a-4db9ba9ba1ff req-ac585a69-06d0-494c-a875-71bc4b5b4127 service nova] Lock "a8813822-f77b-4b73-a6dc-e0eab83b0402-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.472694] env[69992]: DEBUG nova.compute.manager [req-e234563b-a70f-4af9-8a6a-4db9ba9ba1ff req-ac585a69-06d0-494c-a875-71bc4b5b4127 service nova] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] No waiting events found dispatching network-vif-plugged-a225b5fb-43a1-478e-bb4d-0436f27e0475 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 949.473357] env[69992]: WARNING nova.compute.manager [req-e234563b-a70f-4af9-8a6a-4db9ba9ba1ff req-ac585a69-06d0-494c-a875-71bc4b5b4127 service nova] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Received unexpected event network-vif-plugged-a225b5fb-43a1-478e-bb4d-0436f27e0475 for instance with vm_state building and task_state 
spawning. [ 949.571363] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2896884, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.662257} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.574245] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] dd31269e-716c-44cd-9fc3-ce227fe5b3b2/dd31269e-716c-44cd-9fc3-ce227fe5b3b2.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 949.574623] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 949.575535] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2c6c3fec-bdf1-4360-8e31-66083750e7c1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.587375] env[69992]: DEBUG oslo_vmware.api [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896886, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.591081] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 949.591081] env[69992]: value = "task-2896887" [ 949.591081] env[69992]: _type = "Task" [ 949.591081] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.602594] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2896887, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.627036] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896885, 'name': Rename_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.773598] env[69992]: DEBUG nova.compute.manager [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 949.773839] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 949.774732] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88cacdee-2a4c-4395-ba51-ab8d221d1d7b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.785907] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 949.786137] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-717219b2-0319-44b8-b5eb-577eb539c27a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.794198] env[69992]: DEBUG oslo_vmware.api [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for the task: (returnval){ [ 949.794198] env[69992]: value = "task-2896888" [ 949.794198] env[69992]: _type = "Task" [ 949.794198] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.804977] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "0e8163d9-6ff5-4f1e-af33-ccb42fa46750" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.805249] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "0e8163d9-6ff5-4f1e-af33-ccb42fa46750" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.811741] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "refresh_cache-a8813822-f77b-4b73-a6dc-e0eab83b0402" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.811833] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquired lock "refresh_cache-a8813822-f77b-4b73-a6dc-e0eab83b0402" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 949.811939] env[69992]: DEBUG 
nova.network.neutron [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 949.813038] env[69992]: DEBUG oslo_vmware.api [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896888, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.087053] env[69992]: DEBUG oslo_vmware.api [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896886, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.110170] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2896887, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085797} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.110532] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 950.111658] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cfbad9f-0599-406b-b9eb-088a8abbd99f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.150482] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] dd31269e-716c-44cd-9fc3-ce227fe5b3b2/dd31269e-716c-44cd-9fc3-ce227fe5b3b2.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 950.151970] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3e44d6d-b55b-44eb-87f6-a0f430fb6f09 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.166858] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b2a2bf-d4b0-49b2-ac1b-f4fdaae6dbac {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.177472] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79326610-fb10-4654-ae0f-ff8246ec2d88 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.180642] env[69992]: DEBUG oslo_vmware.api [None 
req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896885, 'name': Rename_Task, 'duration_secs': 1.165152} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.182212] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 950.182527] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 950.182527] env[69992]: value = "task-2896889" [ 950.182527] env[69992]: _type = "Task" [ 950.182527] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.183085] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-375c898c-5daf-43d6-a295-cbab4275bdc0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.216870] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-436a2fd2-eba2-40b8-9634-4ae4b1854f3b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.222787] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2896889, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.224437] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 950.224437] env[69992]: value = "task-2896890" [ 950.224437] env[69992]: _type = "Task" [ 950.224437] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.231099] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65bd307f-5c48-486b-a6be-8ddb5d5b5b7c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.237915] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896890, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.248530] env[69992]: DEBUG nova.compute.provider_tree [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 950.304889] env[69992]: DEBUG oslo_vmware.api [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896888, 'name': PowerOffVM_Task, 'duration_secs': 0.212175} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.305190] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 950.305370] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 950.305636] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d6bdd24-96be-45cc-a0b0-8abe9f04d982 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.344620] env[69992]: DEBUG nova.network.neutron [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 950.375387] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 950.375628] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 950.375823] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Deleting the datastore file [datastore2] c205f559-7fe6-4d7e-beba-2fc96b89d705 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 950.376099] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-41fcd5e2-b434-4c15-afa7-d89a3adb3443 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.382736] env[69992]: DEBUG oslo_vmware.api [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for the task: (returnval){ [ 950.382736] env[69992]: value = "task-2896892" [ 950.382736] env[69992]: _type = "Task" [ 950.382736] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.391852] env[69992]: DEBUG oslo_vmware.api [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896892, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.547707] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fd875334-cc40-449c-a55c-c97c1b7b920f tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "98cd0eb8-d17a-4a9b-a172-1ba1207168d0" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.547983] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fd875334-cc40-449c-a55c-c97c1b7b920f tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "98cd0eb8-d17a-4a9b-a172-1ba1207168d0" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.548232] env[69992]: DEBUG nova.compute.manager [None req-fd875334-cc40-449c-a55c-c97c1b7b920f tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 950.552015] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00dab2ec-61bf-413d-a4ec-e4b12526d192 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.556482] env[69992]: DEBUG nova.compute.manager [None req-fd875334-cc40-449c-a55c-c97c1b7b920f tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69992) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 950.557087] env[69992]: DEBUG nova.objects.instance [None req-fd875334-cc40-449c-a55c-c97c1b7b920f tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lazy-loading 'flavor' on Instance uuid 98cd0eb8-d17a-4a9b-a172-1ba1207168d0 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 950.592917] env[69992]: DEBUG oslo_vmware.api [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896886, 'name': CreateSnapshot_Task, 'duration_secs': 1.055972} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.593243] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Created Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 950.594075] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e933584-6284-44b4-9a5e-5ffd066576c0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.609753] env[69992]: DEBUG nova.network.neutron [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Updating instance_info_cache with network_info: [{"id": "a225b5fb-43a1-478e-bb4d-0436f27e0475", "address": "fa:16:3e:ab:18:87", "network": {"id": "e710c9b1-06e9-45f1-88fe-251001c4f54f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1116812669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8217315011854468b0cc17c4dfe342f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa225b5fb-43", "ovs_interfaceid": "a225b5fb-43a1-478e-bb4d-0436f27e0475", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.697066] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2896889, 'name': ReconfigVM_Task, 'duration_secs': 0.323133} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.697288] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Reconfigured VM instance instance-0000001f to attach disk [datastore2] dd31269e-716c-44cd-9fc3-ce227fe5b3b2/dd31269e-716c-44cd-9fc3-ce227fe5b3b2.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 950.697947] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3ccd3b47-43b4-4f18-9003-2766687389d4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.706088] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 950.706088] env[69992]: value = "task-2896893" [ 950.706088] env[69992]: _type = "Task" [ 950.706088] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.716447] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2896893, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.736014] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896890, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.752393] env[69992]: DEBUG nova.scheduler.client.report [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 950.894632] env[69992]: DEBUG oslo_vmware.api [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896892, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.118653] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Creating linked-clone VM from snapshot {{(pid=69992) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 951.119144] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Releasing lock "refresh_cache-a8813822-f77b-4b73-a6dc-e0eab83b0402" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 951.119435] env[69992]: DEBUG nova.compute.manager [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Instance network_info: |[{"id": "a225b5fb-43a1-478e-bb4d-0436f27e0475", "address": "fa:16:3e:ab:18:87", "network": {"id": "e710c9b1-06e9-45f1-88fe-251001c4f54f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1116812669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8217315011854468b0cc17c4dfe342f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa225b5fb-43", "ovs_interfaceid": "a225b5fb-43a1-478e-bb4d-0436f27e0475", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 951.119682] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-617bf824-35a8-4c01-becc-1ba37ac1b218 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.122889] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:18:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e1049e8-c06b-4c93-a9e1-2cbb530f3f95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a225b5fb-43a1-478e-bb4d-0436f27e0475', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 951.130006] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 
tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 951.130252] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 951.130860] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f59fbd9-23f2-4e03-a7f9-386c21646d7b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.148666] env[69992]: DEBUG oslo_vmware.api [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 951.148666] env[69992]: value = "task-2896894" [ 951.148666] env[69992]: _type = "Task" [ 951.148666] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.155992] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 951.155992] env[69992]: value = "task-2896895" [ 951.155992] env[69992]: _type = "Task" [ 951.155992] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.159338] env[69992]: DEBUG oslo_vmware.api [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896894, 'name': CloneVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.166652] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896895, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.217560] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2896893, 'name': Rename_Task, 'duration_secs': 0.159494} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.217739] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 951.218716] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1031dc8-87b9-4b50-a8d8-4991be642042 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.225753] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 951.225753] env[69992]: value = "task-2896896" [ 951.225753] env[69992]: _type = "Task" [ 951.225753] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.239829] env[69992]: DEBUG oslo_vmware.api [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896890, 'name': PowerOnVM_Task, 'duration_secs': 0.534902} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.242849] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 951.243078] env[69992]: INFO nova.compute.manager [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Took 11.51 seconds to spawn the instance on the hypervisor. [ 951.243266] env[69992]: DEBUG nova.compute.manager [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 951.243548] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2896896, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.244301] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-622d08b6-a1b2-4766-ae42-fcb4839229f0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.259350] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.732s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.259842] env[69992]: DEBUG nova.compute.manager [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 951.262607] env[69992]: DEBUG oslo_concurrency.lockutils [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.460s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.262834] env[69992]: DEBUG nova.objects.instance [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Lazy-loading 'resources' on Instance uuid 1d5722e1-5a48-4212-bbc7-527a3739db6e {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 951.394862] env[69992]: DEBUG oslo_vmware.api [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Task: {'id': task-2896892, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.526507} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.395117] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 951.395304] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 951.395480] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 951.395666] env[69992]: INFO nova.compute.manager [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Took 1.62 seconds to destroy the instance on the hypervisor. [ 951.395897] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 951.396102] env[69992]: DEBUG nova.compute.manager [-] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 951.396198] env[69992]: DEBUG nova.network.neutron [-] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 951.498906] env[69992]: DEBUG nova.compute.manager [req-dd4a506b-bc3f-45ab-a2f2-5f05ee8d646b req-003f8cea-bfeb-4148-a0be-fc02731cb9dd service nova] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Received event network-changed-a225b5fb-43a1-478e-bb4d-0436f27e0475 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 951.499145] env[69992]: DEBUG nova.compute.manager [req-dd4a506b-bc3f-45ab-a2f2-5f05ee8d646b req-003f8cea-bfeb-4148-a0be-fc02731cb9dd service nova] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Refreshing instance network info cache due to event network-changed-a225b5fb-43a1-478e-bb4d-0436f27e0475. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 951.499372] env[69992]: DEBUG oslo_concurrency.lockutils [req-dd4a506b-bc3f-45ab-a2f2-5f05ee8d646b req-003f8cea-bfeb-4148-a0be-fc02731cb9dd service nova] Acquiring lock "refresh_cache-a8813822-f77b-4b73-a6dc-e0eab83b0402" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.499518] env[69992]: DEBUG oslo_concurrency.lockutils [req-dd4a506b-bc3f-45ab-a2f2-5f05ee8d646b req-003f8cea-bfeb-4148-a0be-fc02731cb9dd service nova] Acquired lock "refresh_cache-a8813822-f77b-4b73-a6dc-e0eab83b0402" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.499679] env[69992]: DEBUG nova.network.neutron [req-dd4a506b-bc3f-45ab-a2f2-5f05ee8d646b req-003f8cea-bfeb-4148-a0be-fc02731cb9dd service nova] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Refreshing network info cache for port a225b5fb-43a1-478e-bb4d-0436f27e0475 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 951.565922] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd875334-cc40-449c-a55c-c97c1b7b920f tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 951.566375] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dde2c802-6611-4e2d-a027-d416e1747205 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.575625] env[69992]: DEBUG oslo_vmware.api [None req-fd875334-cc40-449c-a55c-c97c1b7b920f tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 951.575625] env[69992]: value = "task-2896897" [ 951.575625] env[69992]: _type = "Task" [ 951.575625] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.591116] env[69992]: DEBUG oslo_vmware.api [None req-fd875334-cc40-449c-a55c-c97c1b7b920f tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896897, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.662719] env[69992]: DEBUG oslo_vmware.api [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896894, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.676480] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896895, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.743792] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2896896, 'name': PowerOnVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.765980] env[69992]: DEBUG nova.compute.utils [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 951.772193] env[69992]: INFO nova.compute.manager [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Took 44.96 seconds to build instance. [ 951.775089] env[69992]: DEBUG nova.compute.manager [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 951.775089] env[69992]: DEBUG nova.network.neutron [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 952.079951] env[69992]: DEBUG nova.policy [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bda0dc556ac14ca5beaa1c15314502b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8bfe77f891464a79af99097ad5ac32da', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 952.097341] env[69992]: DEBUG oslo_vmware.api [None req-fd875334-cc40-449c-a55c-c97c1b7b920f tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896897, 'name': PowerOffVM_Task, 'duration_secs': 0.279688} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.097610] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd875334-cc40-449c-a55c-c97c1b7b920f tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 952.097831] env[69992]: DEBUG nova.compute.manager [None req-fd875334-cc40-449c-a55c-c97c1b7b920f tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 952.098952] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-866f331c-642b-4fc4-89a1-b0fa1a151798 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.163780] env[69992]: DEBUG oslo_vmware.api [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896894, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.176947] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896895, 'name': CreateVM_Task, 'duration_secs': 0.58186} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.177337] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 952.178100] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.179309] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 952.179309] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 952.179309] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-006f2cb2-27a7-4ad1-85b9-eca27b05f079 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.186244] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 952.186244] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52aecd72-77d8-80c0-7263-2c486b6b8504" [ 952.186244] env[69992]: _type = "Task" [ 952.186244] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.197139] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52aecd72-77d8-80c0-7263-2c486b6b8504, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.209682] env[69992]: DEBUG nova.network.neutron [-] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.245851] env[69992]: DEBUG oslo_vmware.api [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2896896, 'name': PowerOnVM_Task, 'duration_secs': 0.597636} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.246235] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 952.246463] env[69992]: INFO nova.compute.manager [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Took 9.66 seconds to spawn the instance on the hypervisor. [ 952.246647] env[69992]: DEBUG nova.compute.manager [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 952.247720] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dacca3d-41d4-41f2-a379-8e2a50a5eaef {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.267492] env[69992]: DEBUG nova.network.neutron [req-dd4a506b-bc3f-45ab-a2f2-5f05ee8d646b req-003f8cea-bfeb-4148-a0be-fc02731cb9dd service nova] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Updated VIF entry in instance network info cache for port a225b5fb-43a1-478e-bb4d-0436f27e0475. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 952.267863] env[69992]: DEBUG nova.network.neutron [req-dd4a506b-bc3f-45ab-a2f2-5f05ee8d646b req-003f8cea-bfeb-4148-a0be-fc02731cb9dd service nova] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Updating instance_info_cache with network_info: [{"id": "a225b5fb-43a1-478e-bb4d-0436f27e0475", "address": "fa:16:3e:ab:18:87", "network": {"id": "e710c9b1-06e9-45f1-88fe-251001c4f54f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1116812669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8217315011854468b0cc17c4dfe342f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa225b5fb-43", "ovs_interfaceid": "a225b5fb-43a1-478e-bb4d-0436f27e0475", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.272353] env[69992]: DEBUG nova.compute.manager [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Start 
building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 952.279498] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9076bac8-508f-4dca-bd69-460a012ea573 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "eec50935-f553-43c7-b67b-7289299745bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 57.530s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.461753] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d9dea8c-3215-4687-a8e0-b80f3b564ac3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.470360] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ce9f6d-875f-464d-8fb2-a5d1a775583d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.501882] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-febbee0d-dbe1-4b30-b4ff-43bb1dc11363 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.512155] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81082b65-e486-48b3-b5a0-1645dbb2db11 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.532362] env[69992]: DEBUG nova.compute.provider_tree [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 952.616971] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fd875334-cc40-449c-a55c-c97c1b7b920f tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "98cd0eb8-d17a-4a9b-a172-1ba1207168d0" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 2.069s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.644661] env[69992]: DEBUG nova.network.neutron [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Successfully created port: cc179b5e-5d8b-49eb-99ea-6adcb9e0af27 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 952.660885] env[69992]: DEBUG oslo_vmware.api [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896894, 'name': CloneVM_Task, 'duration_secs': 1.293042} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.661218] env[69992]: INFO nova.virt.vmwareapi.vmops [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Created linked-clone VM from snapshot [ 952.662057] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15feff45-8586-4685-a007-940723b3c3f2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.670450] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Uploading image 3038820b-fafa-4d89-b2d1-bbbc8ab32242 {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 952.691586] env[69992]: DEBUG oslo_vmware.rw_handles [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 952.691586] env[69992]: value = "vm-581921" [ 952.691586] env[69992]: _type = "VirtualMachine" [ 952.691586] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 952.691904] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-86704020-951e-4595-a7a3-fe17b9359887 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.701491] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52aecd72-77d8-80c0-7263-2c486b6b8504, 'name': SearchDatastore_Task, 'duration_secs': 0.020577} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.702854] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 952.703146] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 952.703437] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.703617] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 952.703823] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 952.704173] env[69992]: DEBUG oslo_vmware.rw_handles [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lease: (returnval){ [ 952.704173] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52af679c-e07e-acb8-b7e9-5221690a6c55" [ 952.704173] env[69992]: _type = "HttpNfcLease" [ 952.704173] env[69992]: } obtained for exporting VM: (result){ [ 952.704173] env[69992]: value = "vm-581921" [ 952.704173] env[69992]: _type = "VirtualMachine" [ 952.704173] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 952.704634] env[69992]: DEBUG oslo_vmware.api [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the lease: (returnval){ [ 952.704634] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52af679c-e07e-acb8-b7e9-5221690a6c55" [ 952.704634] env[69992]: _type = "HttpNfcLease" [ 952.704634] env[69992]: } to be ready. 
{{(pid=69992) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 952.704878] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a21d10ab-74a7-4609-bdc5-d0024a7fc483 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.713518] env[69992]: INFO nova.compute.manager [-] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Took 1.32 seconds to deallocate network for instance. [ 952.713776] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 952.713776] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52af679c-e07e-acb8-b7e9-5221690a6c55" [ 952.713776] env[69992]: _type = "HttpNfcLease" [ 952.713776] env[69992]: } is initializing. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 952.719686] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 952.719686] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 952.721169] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c22e6970-ed8a-4d47-bfc0-d6c56453c6ac {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.726599] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 952.726599] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52bceab1-b285-4b46-c74e-c5c02289ff36" [ 952.726599] env[69992]: _type = "Task" [ 952.726599] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.738304] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52bceab1-b285-4b46-c74e-c5c02289ff36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.771333] env[69992]: DEBUG oslo_concurrency.lockutils [req-dd4a506b-bc3f-45ab-a2f2-5f05ee8d646b req-003f8cea-bfeb-4148-a0be-fc02731cb9dd service nova] Releasing lock "refresh_cache-a8813822-f77b-4b73-a6dc-e0eab83b0402" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 952.773202] env[69992]: INFO nova.compute.manager [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Took 37.26 seconds to build instance. 
[ 952.783261] env[69992]: DEBUG nova.compute.manager [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 953.036071] env[69992]: DEBUG nova.scheduler.client.report [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 953.215772] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 953.215772] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52af679c-e07e-acb8-b7e9-5221690a6c55" [ 953.215772] env[69992]: _type = "HttpNfcLease" [ 953.215772] env[69992]: } is ready. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 953.216195] env[69992]: DEBUG oslo_vmware.rw_handles [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 953.216195] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52af679c-e07e-acb8-b7e9-5221690a6c55" [ 953.216195] env[69992]: _type = "HttpNfcLease" [ 953.216195] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 953.216895] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-745e1f63-5961-4cd3-bb11-335362ae1365 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.221664] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.225664] env[69992]: DEBUG oslo_vmware.rw_handles [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52645bb6-3dca-8684-76ba-28970ea98ea9/disk-0.vmdk from lease info. {{(pid=69992) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 953.225843] env[69992]: DEBUG oslo_vmware.rw_handles [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52645bb6-3dca-8684-76ba-28970ea98ea9/disk-0.vmdk for reading. 
{{(pid=69992) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 953.291538] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5fc087c-0791-4f29-b7cf-fd6bdce4deb4 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 56.823s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.292715] env[69992]: DEBUG nova.compute.manager [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 953.303790] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52bceab1-b285-4b46-c74e-c5c02289ff36, 'name': SearchDatastore_Task, 'duration_secs': 0.010748} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.312823] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ca47aa4-e24e-4f9a-afa7-bf78a1b19c22 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.318604] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 953.318604] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52ab19dd-7c36-4440-7b65-81f260e1d727" [ 953.318604] env[69992]: _type = "Task" [ 953.318604] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.326936] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.333955] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ab19dd-7c36-4440-7b65-81f260e1d727, 'name': SearchDatastore_Task, 'duration_secs': 0.01249} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.336155] env[69992]: DEBUG nova.virt.hardware [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 953.336395] env[69992]: DEBUG nova.virt.hardware [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 953.336553] env[69992]: DEBUG nova.virt.hardware [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 953.336734] env[69992]: DEBUG nova.virt.hardware [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 953.336882] env[69992]: DEBUG nova.virt.hardware [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 953.338243] env[69992]: DEBUG nova.virt.hardware [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 953.338243] env[69992]: DEBUG nova.virt.hardware [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 953.338243] env[69992]: DEBUG nova.virt.hardware [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 953.338243] env[69992]: DEBUG nova.virt.hardware [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Got 1 
possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 953.338243] env[69992]: DEBUG nova.virt.hardware [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 953.338569] env[69992]: DEBUG nova.virt.hardware [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 953.338569] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 953.338569] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] a8813822-f77b-4b73-a6dc-e0eab83b0402/a8813822-f77b-4b73-a6dc-e0eab83b0402.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 953.342029] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b725066f-b4fc-4a64-8949-e54b9e73b799 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.343910] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3ff2818-e7a6-4012-ae03-f5fb8be6189e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.346982] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f98ae962-aed1-46e2-b0a7-91e38d0cadfd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.355782] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde7e5cb-11ea-4bbd-be58-15ade3de675c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.362008] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 953.362008] env[69992]: value = "task-2896899" [ 953.362008] env[69992]: _type = "Task" [ 953.362008] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.380932] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896899, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.424060] env[69992]: DEBUG nova.objects.instance [None req-ec6191ff-b223-46f8-b38c-d8ca683b921d tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lazy-loading 'flavor' on Instance uuid 98cd0eb8-d17a-4a9b-a172-1ba1207168d0 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 953.541932] env[69992]: DEBUG oslo_concurrency.lockutils [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.279s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.546519] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 22.758s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.546519] env[69992]: DEBUG nova.objects.instance [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69992) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 953.575091] env[69992]: INFO nova.scheduler.client.report [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Deleted allocations for instance 1d5722e1-5a48-4212-bbc7-527a3739db6e [ 953.738283] env[69992]: DEBUG nova.compute.manager [req-7b56ae38-6bbf-40e7-b729-8cc33de63b0b req-80d54d22-8c67-4233-9168-0d1d18224d68 service nova] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Received event network-vif-deleted-7f0ad230-48ec-4413-ac8e-78f4421e792d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 953.807414] env[69992]: DEBUG nova.compute.manager [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 953.877194] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896899, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515555} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.878449] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] a8813822-f77b-4b73-a6dc-e0eab83b0402/a8813822-f77b-4b73-a6dc-e0eab83b0402.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 953.882025] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 953.882025] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d9774fc1-5c9f-4448-8720-146d812fee1a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.895165] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 953.895165] env[69992]: value = "task-2896900" [ 953.895165] env[69992]: _type = "Task" [ 953.895165] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.903690] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896900, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.935087] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec6191ff-b223-46f8-b38c-d8ca683b921d tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "refresh_cache-98cd0eb8-d17a-4a9b-a172-1ba1207168d0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.935254] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec6191ff-b223-46f8-b38c-d8ca683b921d tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquired lock "refresh_cache-98cd0eb8-d17a-4a9b-a172-1ba1207168d0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 953.935435] env[69992]: DEBUG nova.network.neutron [None req-ec6191ff-b223-46f8-b38c-d8ca683b921d tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 953.935745] env[69992]: DEBUG nova.objects.instance [None req-ec6191ff-b223-46f8-b38c-d8ca683b921d tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lazy-loading 'info_cache' on Instance uuid 98cd0eb8-d17a-4a9b-a172-1ba1207168d0 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 954.089974] env[69992]: DEBUG oslo_concurrency.lockutils [None req-676dd989-87a6-4696-8b65-b164bdfbb2ed tempest-ImagesOneServerTestJSON-825997867 tempest-ImagesOneServerTestJSON-825997867-project-member] Lock "1d5722e1-5a48-4212-bbc7-527a3739db6e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 29.227s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.339151] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 954.408452] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896900, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075672} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.408991] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 954.410685] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1253c490-a567-47a3-b995-d5aeb46c2ef0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.446719] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] a8813822-f77b-4b73-a6dc-e0eab83b0402/a8813822-f77b-4b73-a6dc-e0eab83b0402.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 954.449607] env[69992]: DEBUG nova.network.neutron [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Successfully updated port: cc179b5e-5d8b-49eb-99ea-6adcb9e0af27 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 954.452135] env[69992]: DEBUG nova.objects.base [None req-ec6191ff-b223-46f8-b38c-d8ca683b921d tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Object Instance<98cd0eb8-d17a-4a9b-a172-1ba1207168d0> lazy-loaded attributes: flavor,info_cache {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 954.454054] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7ab76af-a77f-4bcb-9e35-1ec9de287afb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.484954] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Acquiring lock "refresh_cache-e0b5ad16-f631-444c-a189-167e34574316" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.485240] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Acquired lock "refresh_cache-e0b5ad16-f631-444c-a189-167e34574316" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.485558] env[69992]: DEBUG nova.network.neutron [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 954.496029] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 
tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 954.496029] env[69992]: value = "task-2896901" [ 954.496029] env[69992]: _type = "Task" [ 954.496029] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.511435] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896901, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.558133] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7c5845b1-e774-46a0-a127-759499d024e4 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.566028] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.808s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.566028] env[69992]: INFO nova.compute.claims [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 955.010100] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896901, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.042585] env[69992]: DEBUG nova.network.neutron [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 955.275325] env[69992]: DEBUG nova.network.neutron [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Updating instance_info_cache with network_info: [{"id": "cc179b5e-5d8b-49eb-99ea-6adcb9e0af27", "address": "fa:16:3e:4d:41:71", "network": {"id": "b7226efb-d53a-4612-877c-74d4b647e592", "bridge": "br-int", "label": "tempest-ServersTestJSON-119651800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bfe77f891464a79af99097ad5ac32da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bff6c3a1-cc80-46ca-86c0-6dbb029edddb", "external-id": "nsx-vlan-transportzone-223", "segmentation_id": 223, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc179b5e-5d", "ovs_interfaceid": "cc179b5e-5d8b-49eb-99ea-6adcb9e0af27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.301090] env[69992]: DEBUG nova.network.neutron [None req-ec6191ff-b223-46f8-b38c-d8ca683b921d tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Updating instance_info_cache with network_info: [{"id": "be91de4c-766f-4a66-b07b-2dd3cbe88350", "address": "fa:16:3e:63:8a:1d", "network": {"id": "8299bcb6-041b-4758-9ef5-52d0357be7aa", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-138938467-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "034ac686ad0d438cbe7e56c546f87505", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe91de4c-76", "ovs_interfaceid": "be91de4c-766f-4a66-b07b-2dd3cbe88350", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.513786] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896901, 'name': ReconfigVM_Task, 'duration_secs': 0.629086} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.517125] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Reconfigured VM instance instance-00000020 to attach disk [datastore1] a8813822-f77b-4b73-a6dc-e0eab83b0402/a8813822-f77b-4b73-a6dc-e0eab83b0402.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 955.517125] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-91f06d0c-f799-4abd-998a-6505cfe4e3e6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.525840] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 955.525840] env[69992]: value = "task-2896902" [ 955.525840] env[69992]: _type = "Task" [ 955.525840] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.541913] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896902, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.778221] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Releasing lock "refresh_cache-e0b5ad16-f631-444c-a189-167e34574316" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.778612] env[69992]: DEBUG nova.compute.manager [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Instance network_info: |[{"id": "cc179b5e-5d8b-49eb-99ea-6adcb9e0af27", "address": "fa:16:3e:4d:41:71", "network": {"id": "b7226efb-d53a-4612-877c-74d4b647e592", "bridge": "br-int", "label": "tempest-ServersTestJSON-119651800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bfe77f891464a79af99097ad5ac32da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bff6c3a1-cc80-46ca-86c0-6dbb029edddb", "external-id": "nsx-vlan-transportzone-223", "segmentation_id": 223, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc179b5e-5d", "ovs_interfaceid": "cc179b5e-5d8b-49eb-99ea-6adcb9e0af27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 955.779403] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:41:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bff6c3a1-cc80-46ca-86c0-6dbb029edddb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc179b5e-5d8b-49eb-99ea-6adcb9e0af27', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 955.788104] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Creating folder: Project (8bfe77f891464a79af99097ad5ac32da). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 955.791426] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-779b4b6e-ddac-4b4e-bcd8-2dcdcc6ee827 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.805864] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec6191ff-b223-46f8-b38c-d8ca683b921d tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Releasing lock "refresh_cache-98cd0eb8-d17a-4a9b-a172-1ba1207168d0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.816055] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Created folder: Project (8bfe77f891464a79af99097ad5ac32da) in parent group-v581821. [ 955.816362] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Creating folder: Instances. Parent ref: group-v581923. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 955.817941] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb99521f-a951-4f8b-92fb-605837df18e9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.834455] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Created folder: Instances in parent group-v581923. [ 955.834726] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 955.834953] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0b5ad16-f631-444c-a189-167e34574316] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 955.838443] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-853956ad-8390-4ce8-93f6-3c588527eb62 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.862709] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 955.862709] env[69992]: value = "task-2896905" [ 955.862709] env[69992]: _type = "Task" [ 955.862709] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.875668] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896905, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.975324] env[69992]: DEBUG nova.compute.manager [req-7e220f67-baae-4f2c-bdee-6075067c963d req-11a51a21-0f4e-40ca-85c9-1a41fc356af7 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Received event network-changed-d325d681-8643-43a2-93dd-d4687ad115f5 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 955.975619] env[69992]: DEBUG nova.compute.manager [req-7e220f67-baae-4f2c-bdee-6075067c963d req-11a51a21-0f4e-40ca-85c9-1a41fc356af7 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Refreshing instance network info cache due to event network-changed-d325d681-8643-43a2-93dd-d4687ad115f5. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 955.975973] env[69992]: DEBUG oslo_concurrency.lockutils [req-7e220f67-baae-4f2c-bdee-6075067c963d req-11a51a21-0f4e-40ca-85c9-1a41fc356af7 service nova] Acquiring lock "refresh_cache-dd31269e-716c-44cd-9fc3-ce227fe5b3b2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.976338] env[69992]: DEBUG oslo_concurrency.lockutils [req-7e220f67-baae-4f2c-bdee-6075067c963d req-11a51a21-0f4e-40ca-85c9-1a41fc356af7 service nova] Acquired lock "refresh_cache-dd31269e-716c-44cd-9fc3-ce227fe5b3b2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 955.976482] env[69992]: DEBUG nova.network.neutron [req-7e220f67-baae-4f2c-bdee-6075067c963d req-11a51a21-0f4e-40ca-85c9-1a41fc356af7 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Refreshing network info cache for port d325d681-8643-43a2-93dd-d4687ad115f5 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 956.036879] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896902, 'name': Rename_Task, 'duration_secs': 0.238606} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.037469] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 956.037771] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c4dd0c99-9f7c-4022-916c-b651aff92ed9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.050995] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 956.050995] env[69992]: value = "task-2896906" [ 956.050995] env[69992]: _type = "Task" [ 956.050995] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.063618] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896906, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.292949] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3235d5dc-af84-40a7-b61c-a9675836a1e3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.304464] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bfb2931-dfec-4f3e-8297-3284c91a2526 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.340950] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd07159e-f390-4cc0-b4ff-71f3ee0c13bd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.350418] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2bf8a51-2d50-42ea-879d-ebe8f24898e5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.366655] env[69992]: DEBUG nova.compute.provider_tree [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.377658] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896905, 'name': CreateVM_Task, 'duration_secs': 0.484032} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.377830] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0b5ad16-f631-444c-a189-167e34574316] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 956.378529] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.378689] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.378996] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 956.379997] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb2feeda-23ee-45e4-bb01-8cebd33a5ab6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.385724] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Waiting for the task: (returnval){ [ 956.385724] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d7c662-155a-ce7a-b8c1-f92a9f6b8529" [ 956.385724] env[69992]: _type = "Task" [ 956.385724] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.395326] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d7c662-155a-ce7a-b8c1-f92a9f6b8529, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.571162] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896906, 'name': PowerOnVM_Task} progress is 78%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.833655] env[69992]: DEBUG nova.network.neutron [req-7e220f67-baae-4f2c-bdee-6075067c963d req-11a51a21-0f4e-40ca-85c9-1a41fc356af7 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Updated VIF entry in instance network info cache for port d325d681-8643-43a2-93dd-d4687ad115f5. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 956.834053] env[69992]: DEBUG nova.network.neutron [req-7e220f67-baae-4f2c-bdee-6075067c963d req-11a51a21-0f4e-40ca-85c9-1a41fc356af7 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Updating instance_info_cache with network_info: [{"id": "d325d681-8643-43a2-93dd-d4687ad115f5", "address": "fa:16:3e:19:cd:26", "network": {"id": "50514bed-dff8-45a5-83f3-ec0c1ece9611", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1240365716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f3a2959667e41f1b5868994454b21be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd325d681-86", "ovs_interfaceid": "d325d681-8643-43a2-93dd-d4687ad115f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.845287] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec6191ff-b223-46f8-b38c-d8ca683b921d tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 956.845592] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4c29ee7d-8859-48bb-827f-f92e67c23498 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.856157] env[69992]: DEBUG oslo_vmware.api [None req-ec6191ff-b223-46f8-b38c-d8ca683b921d tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 956.856157] env[69992]: value = "task-2896907" [ 956.856157] env[69992]: _type = "Task" [ 956.856157] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.865267] env[69992]: DEBUG oslo_vmware.api [None req-ec6191ff-b223-46f8-b38c-d8ca683b921d tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896907, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.874394] env[69992]: DEBUG nova.scheduler.client.report [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 956.905797] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d7c662-155a-ce7a-b8c1-f92a9f6b8529, 'name': SearchDatastore_Task, 'duration_secs': 0.011238} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.905797] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 956.905797] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 956.905797] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.906096] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.906096] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 956.906096] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8856188-77b8-4ebd-a60a-b05b8c15b2b1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.918288] env[69992]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 956.918489] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 956.919443] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-352103a0-cfd2-4a7e-b26e-e60750a58e12 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.929983] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Waiting for the task: (returnval){ [ 956.929983] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d496cb-4054-b504-a006-472078adcf27" [ 956.929983] env[69992]: _type = "Task" [ 956.929983] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.941759] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d496cb-4054-b504-a006-472078adcf27, 'name': SearchDatastore_Task, 'duration_secs': 0.011565} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.942805] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e810c84-4964-4cb9-b6e1-5df697f05fe4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.949071] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Waiting for the task: (returnval){ [ 956.949071] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]523f433f-7bbd-fb60-907b-d43d325d8a08" [ 956.949071] env[69992]: _type = "Task" [ 956.949071] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.958806] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523f433f-7bbd-fb60-907b-d43d325d8a08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.068886] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896906, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.336733] env[69992]: DEBUG oslo_concurrency.lockutils [req-7e220f67-baae-4f2c-bdee-6075067c963d req-11a51a21-0f4e-40ca-85c9-1a41fc356af7 service nova] Releasing lock "refresh_cache-dd31269e-716c-44cd-9fc3-ce227fe5b3b2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.336986] env[69992]: DEBUG nova.compute.manager [req-7e220f67-baae-4f2c-bdee-6075067c963d req-11a51a21-0f4e-40ca-85c9-1a41fc356af7 service nova] [instance: e0b5ad16-f631-444c-a189-167e34574316] Received event network-vif-plugged-cc179b5e-5d8b-49eb-99ea-6adcb9e0af27 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 957.337215] env[69992]: DEBUG oslo_concurrency.lockutils [req-7e220f67-baae-4f2c-bdee-6075067c963d req-11a51a21-0f4e-40ca-85c9-1a41fc356af7 service nova] Acquiring lock "e0b5ad16-f631-444c-a189-167e34574316-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.337424] env[69992]: DEBUG oslo_concurrency.lockutils [req-7e220f67-baae-4f2c-bdee-6075067c963d req-11a51a21-0f4e-40ca-85c9-1a41fc356af7 service nova] Lock "e0b5ad16-f631-444c-a189-167e34574316-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.337584] env[69992]: DEBUG oslo_concurrency.lockutils [req-7e220f67-baae-4f2c-bdee-6075067c963d req-11a51a21-0f4e-40ca-85c9-1a41fc356af7 service nova] Lock "e0b5ad16-f631-444c-a189-167e34574316-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.337813] env[69992]: DEBUG nova.compute.manager [req-7e220f67-baae-4f2c-bdee-6075067c963d req-11a51a21-0f4e-40ca-85c9-1a41fc356af7 service nova] [instance: e0b5ad16-f631-444c-a189-167e34574316] No waiting events found dispatching network-vif-plugged-cc179b5e-5d8b-49eb-99ea-6adcb9e0af27 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 957.338010] env[69992]: WARNING nova.compute.manager [req-7e220f67-baae-4f2c-bdee-6075067c963d req-11a51a21-0f4e-40ca-85c9-1a41fc356af7 service nova] [instance: e0b5ad16-f631-444c-a189-167e34574316] Received unexpected event network-vif-plugged-cc179b5e-5d8b-49eb-99ea-6adcb9e0af27 for instance with vm_state building and task_state spawning. [ 957.338193] env[69992]: DEBUG nova.compute.manager [req-7e220f67-baae-4f2c-bdee-6075067c963d req-11a51a21-0f4e-40ca-85c9-1a41fc356af7 service nova] [instance: e0b5ad16-f631-444c-a189-167e34574316] Received event network-changed-cc179b5e-5d8b-49eb-99ea-6adcb9e0af27 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 957.338433] env[69992]: DEBUG nova.compute.manager [req-7e220f67-baae-4f2c-bdee-6075067c963d req-11a51a21-0f4e-40ca-85c9-1a41fc356af7 service nova] [instance: e0b5ad16-f631-444c-a189-167e34574316] Refreshing instance network info cache due to event network-changed-cc179b5e-5d8b-49eb-99ea-6adcb9e0af27. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 957.338710] env[69992]: DEBUG oslo_concurrency.lockutils [req-7e220f67-baae-4f2c-bdee-6075067c963d req-11a51a21-0f4e-40ca-85c9-1a41fc356af7 service nova] Acquiring lock "refresh_cache-e0b5ad16-f631-444c-a189-167e34574316" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.338948] env[69992]: DEBUG oslo_concurrency.lockutils [req-7e220f67-baae-4f2c-bdee-6075067c963d req-11a51a21-0f4e-40ca-85c9-1a41fc356af7 service nova] Acquired lock "refresh_cache-e0b5ad16-f631-444c-a189-167e34574316" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.339216] env[69992]: DEBUG nova.network.neutron [req-7e220f67-baae-4f2c-bdee-6075067c963d req-11a51a21-0f4e-40ca-85c9-1a41fc356af7 service nova] [instance: e0b5ad16-f631-444c-a189-167e34574316] Refreshing network info cache for port cc179b5e-5d8b-49eb-99ea-6adcb9e0af27 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 957.368734] env[69992]: DEBUG oslo_vmware.api [None req-ec6191ff-b223-46f8-b38c-d8ca683b921d tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896907, 'name': PowerOnVM_Task} progress is 74%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.382118] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.819s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.382702] env[69992]: DEBUG nova.compute.manager [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 957.388877] env[69992]: DEBUG oslo_concurrency.lockutils [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.266s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.388877] env[69992]: INFO nova.compute.claims [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 957.462230] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523f433f-7bbd-fb60-907b-d43d325d8a08, 'name': SearchDatastore_Task, 'duration_secs': 0.010587} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.462230] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.462230] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] e0b5ad16-f631-444c-a189-167e34574316/e0b5ad16-f631-444c-a189-167e34574316.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 957.462230] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3169a7e3-fc04-49ab-b671-f808cebf1cfd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.471753] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Waiting for the task: (returnval){ [ 957.471753] env[69992]: value = "task-2896908" [ 957.471753] env[69992]: _type = "Task" [ 957.471753] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.481759] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Task: {'id': task-2896908, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.565850] env[69992]: DEBUG oslo_vmware.api [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896906, 'name': PowerOnVM_Task, 'duration_secs': 1.083096} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.566178] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 957.566390] env[69992]: INFO nova.compute.manager [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Took 9.30 seconds to spawn the instance on the hypervisor. 
[ 957.566570] env[69992]: DEBUG nova.compute.manager [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 957.567552] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e261198-530b-444f-8ac4-1743b71c0203 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.868730] env[69992]: DEBUG oslo_vmware.api [None req-ec6191ff-b223-46f8-b38c-d8ca683b921d tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896907, 'name': PowerOnVM_Task, 'duration_secs': 0.770053} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.869490] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec6191ff-b223-46f8-b38c-d8ca683b921d tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 957.869840] env[69992]: DEBUG nova.compute.manager [None req-ec6191ff-b223-46f8-b38c-d8ca683b921d tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 957.870802] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c11739-cf59-407e-aa7d-b2143dff5420 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.892631] env[69992]: DEBUG nova.compute.utils [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 957.899511] env[69992]: DEBUG nova.compute.manager [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 957.899748] env[69992]: DEBUG nova.network.neutron [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 957.974139] env[69992]: DEBUG nova.policy [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bfb2b2303d6448da9043701c396a2b4c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '02824f4021a5400583cf13cd553207fa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 957.984187] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Task: {'id': task-2896908, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511225} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.984187] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] e0b5ad16-f631-444c-a189-167e34574316/e0b5ad16-f631-444c-a189-167e34574316.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 957.985135] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 957.985632] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7f44829b-7421-4d1b-9692-2350b57c5a49 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.996144] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Waiting for the task: (returnval){ [ 957.996144] env[69992]: value = "task-2896909" [ 957.996144] env[69992]: _type = "Task" [ 957.996144] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.015222] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Task: {'id': task-2896909, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.090692] env[69992]: INFO nova.compute.manager [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Took 33.48 seconds to build instance. [ 958.168557] env[69992]: DEBUG nova.network.neutron [req-7e220f67-baae-4f2c-bdee-6075067c963d req-11a51a21-0f4e-40ca-85c9-1a41fc356af7 service nova] [instance: e0b5ad16-f631-444c-a189-167e34574316] Updated VIF entry in instance network info cache for port cc179b5e-5d8b-49eb-99ea-6adcb9e0af27. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 958.168557] env[69992]: DEBUG nova.network.neutron [req-7e220f67-baae-4f2c-bdee-6075067c963d req-11a51a21-0f4e-40ca-85c9-1a41fc356af7 service nova] [instance: e0b5ad16-f631-444c-a189-167e34574316] Updating instance_info_cache with network_info: [{"id": "cc179b5e-5d8b-49eb-99ea-6adcb9e0af27", "address": "fa:16:3e:4d:41:71", "network": {"id": "b7226efb-d53a-4612-877c-74d4b647e592", "bridge": "br-int", "label": "tempest-ServersTestJSON-119651800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bfe77f891464a79af99097ad5ac32da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bff6c3a1-cc80-46ca-86c0-6dbb029edddb", "external-id": "nsx-vlan-transportzone-223", "segmentation_id": 223, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc179b5e-5d", "ovs_interfaceid": "cc179b5e-5d8b-49eb-99ea-6adcb9e0af27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.406023] env[69992]: DEBUG nova.compute.manager [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 958.489319] env[69992]: DEBUG nova.network.neutron [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Successfully created port: 2dde3583-4d1f-43c5-8824-63e1fab3ee3d {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 958.511841] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Task: {'id': task-2896909, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.139885} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.512123] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 958.512987] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f696fcc6-1714-485d-a28d-b6b7dd02bb12 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.541903] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] e0b5ad16-f631-444c-a189-167e34574316/e0b5ad16-f631-444c-a189-167e34574316.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 958.545529] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe890b34-f643-49bd-927a-0eb32172c771 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.571659] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Waiting for the task: (returnval){ [ 958.571659] env[69992]: value = "task-2896910" [ 958.571659] env[69992]: _type = "Task" [ 958.571659] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.585667] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Task: {'id': task-2896910, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.595435] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1be89fae-8341-44ac-a87f-acf465c5a46e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "a8813822-f77b-4b73-a6dc-e0eab83b0402" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.483s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.675980] env[69992]: DEBUG oslo_concurrency.lockutils [req-7e220f67-baae-4f2c-bdee-6075067c963d req-11a51a21-0f4e-40ca-85c9-1a41fc356af7 service nova] Releasing lock "refresh_cache-e0b5ad16-f631-444c-a189-167e34574316" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.093056] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Task: {'id': task-2896910, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.101371] env[69992]: DEBUG nova.compute.manager [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 959.144249] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2562e75-44d1-4d1e-9c5f-1470ec2364a5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.153299] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf384cb-ff83-4b11-bfd1-59596ffe19ac {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.195597] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9bb9894-8598-4b35-8b5a-a808bf1924e2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.204144] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99afa8c7-cf1a-4302-8276-94b648933b5b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.222655] env[69992]: DEBUG nova.compute.provider_tree [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.419017] env[69992]: DEBUG nova.compute.manager [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 959.448160] env[69992]: DEBUG nova.virt.hardware [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 959.448160] env[69992]: DEBUG nova.virt.hardware [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 959.448160] env[69992]: DEBUG nova.virt.hardware [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 959.448160] env[69992]: DEBUG nova.virt.hardware [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 959.448362] env[69992]: DEBUG nova.virt.hardware [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 959.448362] env[69992]: DEBUG nova.virt.hardware [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 959.448362] env[69992]: DEBUG nova.virt.hardware [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 959.448362] env[69992]: DEBUG nova.virt.hardware [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 959.448362] env[69992]: DEBUG nova.virt.hardware [None req-f9584699-a182-4fe8-a161-07ef328c4623 
tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 959.448509] env[69992]: DEBUG nova.virt.hardware [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 959.448626] env[69992]: DEBUG nova.virt.hardware [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 959.449549] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94410e3d-ebae-4ba5-a931-6b273728b57d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.452932] env[69992]: INFO nova.compute.manager [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Rescuing [ 959.453176] env[69992]: DEBUG oslo_concurrency.lockutils [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "refresh_cache-a8813822-f77b-4b73-a6dc-e0eab83b0402" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.453328] env[69992]: DEBUG oslo_concurrency.lockutils [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquired lock "refresh_cache-a8813822-f77b-4b73-a6dc-e0eab83b0402" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.453518] env[69992]: DEBUG nova.network.neutron [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 959.462870] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf02f9a5-8674-4605-a350-c5426fd18af0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.587069] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Task: {'id': task-2896910, 'name': ReconfigVM_Task, 'duration_secs': 0.754192} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.587648] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Reconfigured VM instance instance-00000021 to attach disk [datastore1] e0b5ad16-f631-444c-a189-167e34574316/e0b5ad16-f631-444c-a189-167e34574316.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 959.588066] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1430f0f9-5c22-42fb-8115-5e07d775218a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.595950] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Waiting for the task: (returnval){ [ 959.595950] env[69992]: value = "task-2896911" [ 959.595950] env[69992]: _type = "Task" [ 959.595950] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.609532] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Task: {'id': task-2896911, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.625531] env[69992]: DEBUG oslo_concurrency.lockutils [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.726432] env[69992]: DEBUG nova.scheduler.client.report [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 960.108304] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Task: {'id': task-2896911, 'name': Rename_Task, 'duration_secs': 0.227964} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.108718] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 960.108819] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-528a6877-8e00-477e-b2ed-92d75ec5346a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.123514] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Waiting for the task: (returnval){ [ 960.123514] env[69992]: value = "task-2896912" [ 960.123514] env[69992]: _type = "Task" [ 960.123514] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.138148] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Task: {'id': task-2896912, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.231991] env[69992]: DEBUG oslo_concurrency.lockutils [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.846s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.232600] env[69992]: DEBUG nova.compute.manager [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 960.235427] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.098s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.236923] env[69992]: INFO nova.compute.claims [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 960.375603] env[69992]: DEBUG nova.network.neutron [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Updating instance_info_cache with network_info: [{"id": "a225b5fb-43a1-478e-bb4d-0436f27e0475", "address": "fa:16:3e:ab:18:87", "network": {"id": "e710c9b1-06e9-45f1-88fe-251001c4f54f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1116812669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8217315011854468b0cc17c4dfe342f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa225b5fb-43", "ovs_interfaceid": "a225b5fb-43a1-478e-bb4d-0436f27e0475", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.447593] env[69992]: DEBUG nova.network.neutron [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Successfully updated port: 2dde3583-4d1f-43c5-8824-63e1fab3ee3d {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 960.636076] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Task: {'id': task-2896912, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.738503] env[69992]: DEBUG nova.compute.manager [req-979446f3-d815-4d11-878c-2662bef79409 req-65988fa9-7dd5-4d63-b7e5-53ec08348a06 service nova] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Received event network-vif-plugged-2dde3583-4d1f-43c5-8824-63e1fab3ee3d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 960.738743] env[69992]: DEBUG oslo_concurrency.lockutils [req-979446f3-d815-4d11-878c-2662bef79409 req-65988fa9-7dd5-4d63-b7e5-53ec08348a06 service nova] Acquiring lock "bcb5131c-b2c6-4971-8a2e-4fcd7133442d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.738935] env[69992]: DEBUG oslo_concurrency.lockutils [req-979446f3-d815-4d11-878c-2662bef79409 req-65988fa9-7dd5-4d63-b7e5-53ec08348a06 service nova] Lock "bcb5131c-b2c6-4971-8a2e-4fcd7133442d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.739144] env[69992]: DEBUG oslo_concurrency.lockutils [req-979446f3-d815-4d11-878c-2662bef79409 req-65988fa9-7dd5-4d63-b7e5-53ec08348a06 service nova] Lock "bcb5131c-b2c6-4971-8a2e-4fcd7133442d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.739317] env[69992]: DEBUG nova.compute.manager [req-979446f3-d815-4d11-878c-2662bef79409 req-65988fa9-7dd5-4d63-b7e5-53ec08348a06 service nova] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] No waiting events found dispatching network-vif-plugged-2dde3583-4d1f-43c5-8824-63e1fab3ee3d {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 960.739995] env[69992]: WARNING nova.compute.manager [req-979446f3-d815-4d11-878c-2662bef79409 req-65988fa9-7dd5-4d63-b7e5-53ec08348a06 service nova] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Received unexpected event network-vif-plugged-2dde3583-4d1f-43c5-8824-63e1fab3ee3d for instance with vm_state building and task_state spawning. [ 960.744351] env[69992]: DEBUG nova.compute.utils [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 960.747590] env[69992]: DEBUG nova.compute.manager [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 960.747590] env[69992]: DEBUG nova.network.neutron [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 960.800251] env[69992]: DEBUG nova.policy [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '174f0db471f84f40a2e18bf813e2480e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3fc8205ec2e14fdba28998521b552a69', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 960.879503] env[69992]: DEBUG oslo_concurrency.lockutils [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Releasing lock "refresh_cache-a8813822-f77b-4b73-a6dc-e0eab83b0402" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.954362] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "refresh_cache-bcb5131c-b2c6-4971-8a2e-4fcd7133442d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.954529] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquired lock "refresh_cache-bcb5131c-b2c6-4971-8a2e-4fcd7133442d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.954700] env[69992]: DEBUG nova.network.neutron [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 961.099897] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Acquiring lock "06442c68-7dc6-46a1-9e35-34a62730a555" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.100509] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Lock "06442c68-7dc6-46a1-9e35-34a62730a555" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 961.138102] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Task: {'id': task-2896912, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.139304] env[69992]: DEBUG nova.network.neutron [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Successfully created port: 9f2a98a5-6cca-48ee-84a6-66bf08b7e92f {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 961.250208] env[69992]: DEBUG nova.compute.manager [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 961.503396] env[69992]: DEBUG nova.network.neutron [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 961.641179] env[69992]: DEBUG oslo_vmware.api [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Task: {'id': task-2896912, 'name': PowerOnVM_Task, 'duration_secs': 1.051037} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.641509] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 961.641718] env[69992]: INFO nova.compute.manager [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Took 8.35 seconds to spawn the instance on the hypervisor. 
[ 961.641959] env[69992]: DEBUG nova.compute.manager [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 961.642925] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b27771-9b32-4f08-b2eb-837dd671f103 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.711393] env[69992]: DEBUG nova.network.neutron [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Updating instance_info_cache with network_info: [{"id": "2dde3583-4d1f-43c5-8824-63e1fab3ee3d", "address": "fa:16:3e:9e:5d:b8", "network": {"id": "918ab136-b380-4ccd-b218-738aac4652fa", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1944921913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02824f4021a5400583cf13cd553207fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac2c9d07-ed01-47a9-88f1-562992bc1076", "external-id": "nsx-vlan-transportzone-968", "segmentation_id": 968, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dde3583-4d", "ovs_interfaceid": "2dde3583-4d1f-43c5-8824-63e1fab3ee3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.979916] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c3367e-7425-4112-905a-4897d98fd56e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.989657] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf4bc89-20e0-42b8-9b92-7ad9cd980d96 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.022046] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3f86c8-e415-4b7c-8b38-ef1f522bab58 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.031220] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0640745d-e898-4ac6-b178-34b972f75ed3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.046243] env[69992]: DEBUG nova.compute.provider_tree [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 
{{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 962.164116] env[69992]: INFO nova.compute.manager [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Took 36.01 seconds to build instance. [ 962.214144] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Releasing lock "refresh_cache-bcb5131c-b2c6-4971-8a2e-4fcd7133442d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.214479] env[69992]: DEBUG nova.compute.manager [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Instance network_info: |[{"id": "2dde3583-4d1f-43c5-8824-63e1fab3ee3d", "address": "fa:16:3e:9e:5d:b8", "network": {"id": "918ab136-b380-4ccd-b218-738aac4652fa", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1944921913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02824f4021a5400583cf13cd553207fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac2c9d07-ed01-47a9-88f1-562992bc1076", "external-id": "nsx-vlan-transportzone-968", "segmentation_id": 968, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dde3583-4d", "ovs_interfaceid": "2dde3583-4d1f-43c5-8824-63e1fab3ee3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 962.215280] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:5d:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac2c9d07-ed01-47a9-88f1-562992bc1076', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2dde3583-4d1f-43c5-8824-63e1fab3ee3d', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 962.222758] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 962.223200] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 962.223200] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9bf4c501-1254-4896-9cf7-bec48597cbbb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.248041] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 962.248041] env[69992]: value = "task-2896913" [ 962.248041] env[69992]: _type = "Task" [ 962.248041] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.259925] env[69992]: DEBUG nova.compute.manager [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 962.262030] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896913, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.292874] env[69992]: DEBUG nova.virt.hardware [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 962.293058] env[69992]: DEBUG nova.virt.hardware [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 962.296438] env[69992]: DEBUG nova.virt.hardware [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 962.296438] env[69992]: DEBUG nova.virt.hardware [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 962.296438] env[69992]: DEBUG nova.virt.hardware [None 
req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 962.296438] env[69992]: DEBUG nova.virt.hardware [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 962.296438] env[69992]: DEBUG nova.virt.hardware [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 962.297158] env[69992]: DEBUG nova.virt.hardware [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 962.297158] env[69992]: DEBUG nova.virt.hardware [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 962.297158] env[69992]: DEBUG nova.virt.hardware [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 962.297158] env[69992]: DEBUG nova.virt.hardware [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 962.297158] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a0c2d18-da06-4884-8d7d-4bee583552b0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.305293] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4275c542-ba1b-4abb-a84d-a9e14b47b42a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.415737] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 962.416023] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-193d9b26-2ad0-4eaf-8d7d-d6c4aabac8bc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.424518] env[69992]: DEBUG oslo_vmware.api [None 
req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 962.424518] env[69992]: value = "task-2896914" [ 962.424518] env[69992]: _type = "Task" [ 962.424518] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.433925] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896914, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.554297] env[69992]: DEBUG nova.scheduler.client.report [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 962.667092] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9227a00e-b1db-4106-9e56-3bf46d034184 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Lock "e0b5ad16-f631-444c-a189-167e34574316" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.407s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.735608] env[69992]: DEBUG nova.network.neutron [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Successfully updated port: 9f2a98a5-6cca-48ee-84a6-66bf08b7e92f {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 962.759868] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896913, 'name': CreateVM_Task, 'duration_secs': 0.498067} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.759868] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 962.760251] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.760399] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.761196] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 962.761283] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac529c08-4319-4251-9dcc-e6ad4c699288 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.767623] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 962.767623] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52c36427-0900-60a4-69ca-81b2c2768e56" [ 962.767623] env[69992]: _type = "Task" [ 962.767623] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.776848] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c36427-0900-60a4-69ca-81b2c2768e56, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.779467] env[69992]: DEBUG nova.compute.manager [req-7a0bc6e3-cb04-4a65-b036-16d65b08969f req-d27fcf7c-4cff-4bec-9991-b88dfc5c3b4f service nova] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Received event network-changed-2dde3583-4d1f-43c5-8824-63e1fab3ee3d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 962.779633] env[69992]: DEBUG nova.compute.manager [req-7a0bc6e3-cb04-4a65-b036-16d65b08969f req-d27fcf7c-4cff-4bec-9991-b88dfc5c3b4f service nova] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Refreshing instance network info cache due to event network-changed-2dde3583-4d1f-43c5-8824-63e1fab3ee3d. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 962.779886] env[69992]: DEBUG oslo_concurrency.lockutils [req-7a0bc6e3-cb04-4a65-b036-16d65b08969f req-d27fcf7c-4cff-4bec-9991-b88dfc5c3b4f service nova] Acquiring lock "refresh_cache-bcb5131c-b2c6-4971-8a2e-4fcd7133442d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.779999] env[69992]: DEBUG oslo_concurrency.lockutils [req-7a0bc6e3-cb04-4a65-b036-16d65b08969f req-d27fcf7c-4cff-4bec-9991-b88dfc5c3b4f service nova] Acquired lock "refresh_cache-bcb5131c-b2c6-4971-8a2e-4fcd7133442d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.780208] env[69992]: DEBUG nova.network.neutron [req-7a0bc6e3-cb04-4a65-b036-16d65b08969f req-d27fcf7c-4cff-4bec-9991-b88dfc5c3b4f service nova] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Refreshing network info cache for port 2dde3583-4d1f-43c5-8824-63e1fab3ee3d {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 962.939251] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896914, 'name': PowerOffVM_Task, 'duration_secs': 0.287057} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.939251] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 962.939251] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b27337c8-c009-43d7-95dc-824844b02ade {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.960908] env[69992]: DEBUG oslo_concurrency.lockutils [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "94a4a16e-926c-47ce-a5a7-0b216b7c5442" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 962.961188] env[69992]: DEBUG oslo_concurrency.lockutils [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "94a4a16e-926c-47ce-a5a7-0b216b7c5442" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.961417] env[69992]: DEBUG oslo_concurrency.lockutils [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "94a4a16e-926c-47ce-a5a7-0b216b7c5442-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 962.961608] 
env[69992]: DEBUG oslo_concurrency.lockutils [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "94a4a16e-926c-47ce-a5a7-0b216b7c5442-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.961781] env[69992]: DEBUG oslo_concurrency.lockutils [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "94a4a16e-926c-47ce-a5a7-0b216b7c5442-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.963802] env[69992]: INFO nova.compute.manager [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Terminating instance [ 962.967567] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af07a92-e1dd-4685-8bf0-52592b7ff831 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.004687] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 963.005072] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-03a5c3d6-992b-416f-8872-ec35b9ca0101 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.015602] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 963.015602] env[69992]: value = "task-2896915" [ 963.015602] env[69992]: _type = "Task" [ 963.015602] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.025472] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896915, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.059816] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.824s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 963.060458] env[69992]: DEBUG nova.compute.manager [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 963.064026] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.843s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 963.064264] env[69992]: DEBUG nova.objects.instance [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lazy-loading 'resources' on Instance uuid 00b2fd0b-7841-448d-82cf-436aa8d80cda {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 963.171127] env[69992]: DEBUG nova.compute.manager [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 963.241477] env[69992]: DEBUG oslo_concurrency.lockutils [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquiring lock "refresh_cache-97cb6372-3f4e-427d-9509-7e6c43aa2e7b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.241477] env[69992]: DEBUG oslo_concurrency.lockutils [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquired lock "refresh_cache-97cb6372-3f4e-427d-9509-7e6c43aa2e7b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 963.241477] env[69992]: DEBUG nova.network.neutron [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 963.280703] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c36427-0900-60a4-69ca-81b2c2768e56, 'name': SearchDatastore_Task, 'duration_secs': 0.016738} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.281068] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 963.281351] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 963.281622] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.281787] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 963.281981] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 963.284412] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ccf4b5c2-b0b1-4dc7-867b-cb27b6cff055 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.294983] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 963.295234] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 963.296228] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f62e0a26-04b8-423c-b798-2459c136484b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.304650] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 963.304650] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52b1a028-085f-e619-e4dc-c23827bb61e9" [ 963.304650] env[69992]: _type = "Task" [ 963.304650] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.313935] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b1a028-085f-e619-e4dc-c23827bb61e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.471735] env[69992]: DEBUG nova.compute.manager [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 963.471967] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 963.473274] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b1adf52-77d3-46fe-829e-f2141943d0e1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.482370] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 963.482781] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea760a99-6896-430a-905d-59768f12f855 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.490329] env[69992]: DEBUG oslo_vmware.api [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 963.490329] env[69992]: value = "task-2896916" [ 963.490329] env[69992]: _type = "Task" [ 963.490329] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.502372] env[69992]: DEBUG oslo_vmware.api [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896916, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.530036] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] VM already powered off {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 963.531181] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 963.531443] env[69992]: DEBUG oslo_concurrency.lockutils [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.567693] env[69992]: DEBUG nova.compute.utils [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 963.567900] env[69992]: DEBUG nova.compute.manager [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 963.568208] env[69992]: DEBUG nova.network.neutron [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 963.597931] env[69992]: DEBUG nova.network.neutron [req-7a0bc6e3-cb04-4a65-b036-16d65b08969f req-d27fcf7c-4cff-4bec-9991-b88dfc5c3b4f service nova] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Updated VIF entry in instance network info cache for port 2dde3583-4d1f-43c5-8824-63e1fab3ee3d. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 963.598427] env[69992]: DEBUG nova.network.neutron [req-7a0bc6e3-cb04-4a65-b036-16d65b08969f req-d27fcf7c-4cff-4bec-9991-b88dfc5c3b4f service nova] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Updating instance_info_cache with network_info: [{"id": "2dde3583-4d1f-43c5-8824-63e1fab3ee3d", "address": "fa:16:3e:9e:5d:b8", "network": {"id": "918ab136-b380-4ccd-b218-738aac4652fa", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1944921913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02824f4021a5400583cf13cd553207fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac2c9d07-ed01-47a9-88f1-562992bc1076", "external-id": "nsx-vlan-transportzone-968", "segmentation_id": 968, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dde3583-4d", "ovs_interfaceid": "2dde3583-4d1f-43c5-8824-63e1fab3ee3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.685550] env[69992]: DEBUG nova.policy [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9e7acd70754b4b5d966bcc0662b9a2e8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca458056b0794b08b812f0a4106a448c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 963.704106] env[69992]: DEBUG oslo_concurrency.lockutils [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.777600] env[69992]: DEBUG nova.network.neutron [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 963.823372] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b1a028-085f-e619-e4dc-c23827bb61e9, 'name': SearchDatastore_Task, 'duration_secs': 0.013302} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.829144] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d45b21c-4714-484d-951e-39b33626e0a7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.835797] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 963.835797] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52982c5c-e589-45b4-2a97-942a8e0d6020" [ 963.835797] env[69992]: _type = "Task" [ 963.835797] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.847394] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52982c5c-e589-45b4-2a97-942a8e0d6020, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.926386] env[69992]: DEBUG nova.network.neutron [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Updating instance_info_cache with network_info: [{"id": "9f2a98a5-6cca-48ee-84a6-66bf08b7e92f", "address": "fa:16:3e:c1:93:aa", "network": {"id": "6427034b-cb68-41ff-8426-d7ce876af837", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-938441390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3fc8205ec2e14fdba28998521b552a69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f2a98a5-6c", "ovs_interfaceid": "9f2a98a5-6cca-48ee-84a6-66bf08b7e92f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.005225] env[69992]: DEBUG oslo_vmware.api [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896916, 'name': PowerOffVM_Task, 'duration_secs': 0.245119} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.005512] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 964.005701] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 964.005986] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-30db4e15-827f-418d-b97a-4c329e9ebd9c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.073987] env[69992]: DEBUG nova.compute.manager [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 964.079398] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 964.079687] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 964.079881] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Deleting the datastore file [datastore2] 94a4a16e-926c-47ce-a5a7-0b216b7c5442 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 964.085020] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e9916740-03f3-4846-9e5c-2032a9d1f2f1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.094044] env[69992]: DEBUG oslo_vmware.api [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 964.094044] env[69992]: value = "task-2896918" [ 964.094044] env[69992]: _type = "Task" [ 964.094044] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.103231] env[69992]: DEBUG oslo_concurrency.lockutils [req-7a0bc6e3-cb04-4a65-b036-16d65b08969f req-d27fcf7c-4cff-4bec-9991-b88dfc5c3b4f service nova] Releasing lock "refresh_cache-bcb5131c-b2c6-4971-8a2e-4fcd7133442d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.103231] env[69992]: DEBUG nova.compute.manager [req-7a0bc6e3-cb04-4a65-b036-16d65b08969f req-d27fcf7c-4cff-4bec-9991-b88dfc5c3b4f service nova] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Received event network-vif-plugged-9f2a98a5-6cca-48ee-84a6-66bf08b7e92f {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 964.103304] env[69992]: DEBUG oslo_concurrency.lockutils [req-7a0bc6e3-cb04-4a65-b036-16d65b08969f req-d27fcf7c-4cff-4bec-9991-b88dfc5c3b4f service nova] Acquiring lock "97cb6372-3f4e-427d-9509-7e6c43aa2e7b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.103597] env[69992]: DEBUG oslo_concurrency.lockutils [req-7a0bc6e3-cb04-4a65-b036-16d65b08969f req-d27fcf7c-4cff-4bec-9991-b88dfc5c3b4f service nova] Lock "97cb6372-3f4e-427d-9509-7e6c43aa2e7b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.103760] env[69992]: DEBUG oslo_concurrency.lockutils [req-7a0bc6e3-cb04-4a65-b036-16d65b08969f req-d27fcf7c-4cff-4bec-9991-b88dfc5c3b4f service nova] Lock "97cb6372-3f4e-427d-9509-7e6c43aa2e7b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.104019] env[69992]: DEBUG nova.compute.manager [req-7a0bc6e3-cb04-4a65-b036-16d65b08969f req-d27fcf7c-4cff-4bec-9991-b88dfc5c3b4f service nova] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] No waiting events found dispatching network-vif-plugged-9f2a98a5-6cca-48ee-84a6-66bf08b7e92f {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 964.104138] env[69992]: WARNING nova.compute.manager [req-7a0bc6e3-cb04-4a65-b036-16d65b08969f req-d27fcf7c-4cff-4bec-9991-b88dfc5c3b4f service nova] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Received unexpected event network-vif-plugged-9f2a98a5-6cca-48ee-84a6-66bf08b7e92f for instance with vm_state building and task_state spawning. [ 964.104303] env[69992]: DEBUG nova.compute.manager [req-7a0bc6e3-cb04-4a65-b036-16d65b08969f req-d27fcf7c-4cff-4bec-9991-b88dfc5c3b4f service nova] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Received event network-changed-9f2a98a5-6cca-48ee-84a6-66bf08b7e92f {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 964.104453] env[69992]: DEBUG nova.compute.manager [req-7a0bc6e3-cb04-4a65-b036-16d65b08969f req-d27fcf7c-4cff-4bec-9991-b88dfc5c3b4f service nova] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Refreshing instance network info cache due to event network-changed-9f2a98a5-6cca-48ee-84a6-66bf08b7e92f. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 964.104615] env[69992]: DEBUG oslo_concurrency.lockutils [req-7a0bc6e3-cb04-4a65-b036-16d65b08969f req-d27fcf7c-4cff-4bec-9991-b88dfc5c3b4f service nova] Acquiring lock "refresh_cache-97cb6372-3f4e-427d-9509-7e6c43aa2e7b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.108559] env[69992]: DEBUG oslo_vmware.api [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896918, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.183023] env[69992]: DEBUG nova.network.neutron [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Successfully created port: b39fa912-b02a-4764-8cc8-f79e08d575c6 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 964.262441] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bdd654-b180-4c96-9aef-458230ee3101 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.270436] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43c57479-9d5f-4274-aafb-63bdc7384b78 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.303484] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbb88940-f0ab-471f-9527-8be89c1e1e5e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.314615] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb308147-b836-4e8b-bb3e-300c4f4f5742 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.336330] env[69992]: DEBUG nova.compute.provider_tree [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 964.348579] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52982c5c-e589-45b4-2a97-942a8e0d6020, 'name': SearchDatastore_Task, 'duration_secs': 0.013284} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.348850] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.349137] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] bcb5131c-b2c6-4971-8a2e-4fcd7133442d/bcb5131c-b2c6-4971-8a2e-4fcd7133442d.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 964.349395] env[69992]: DEBUG oslo_concurrency.lockutils [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 964.349604] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 964.349824] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f13e16bf-40a5-4383-bd70-a5e37fead2fe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.353916] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc2b2216-f392-4bb1-b814-1957a4acf869 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.363302] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 964.363302] env[69992]: value = "task-2896919" [ 964.363302] env[69992]: _type = "Task" [ 964.363302] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.368069] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 964.368281] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 964.369684] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-182eda74-b94e-4f0d-b2fc-6afdb5969e96 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.375842] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896919, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.380482] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 964.380482] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528b2271-f3d0-974d-7e38-37d8f7219c09" [ 964.380482] env[69992]: _type = "Task" [ 964.380482] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.389331] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528b2271-f3d0-974d-7e38-37d8f7219c09, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.429786] env[69992]: DEBUG oslo_concurrency.lockutils [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Releasing lock "refresh_cache-97cb6372-3f4e-427d-9509-7e6c43aa2e7b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.430182] env[69992]: DEBUG nova.compute.manager [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Instance network_info: |[{"id": "9f2a98a5-6cca-48ee-84a6-66bf08b7e92f", "address": "fa:16:3e:c1:93:aa", "network": {"id": "6427034b-cb68-41ff-8426-d7ce876af837", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-938441390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3fc8205ec2e14fdba28998521b552a69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f2a98a5-6c", "ovs_interfaceid": "9f2a98a5-6cca-48ee-84a6-66bf08b7e92f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 964.430487] env[69992]: DEBUG oslo_concurrency.lockutils [req-7a0bc6e3-cb04-4a65-b036-16d65b08969f req-d27fcf7c-4cff-4bec-9991-b88dfc5c3b4f service nova] Acquired lock "refresh_cache-97cb6372-3f4e-427d-9509-7e6c43aa2e7b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 964.430703] env[69992]: DEBUG nova.network.neutron [req-7a0bc6e3-cb04-4a65-b036-16d65b08969f req-d27fcf7c-4cff-4bec-9991-b88dfc5c3b4f service nova] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Refreshing network info cache for port 9f2a98a5-6cca-48ee-84a6-66bf08b7e92f {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 964.432548] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:93:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f2a98a5-6cca-48ee-84a6-66bf08b7e92f', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 964.440742] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 964.442341] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 964.445531] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dabd2d60-feb6-4395-9cd1-73babefb91cd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.471641] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 964.471641] env[69992]: value = "task-2896920" [ 964.471641] env[69992]: _type = "Task" [ 964.471641] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.482949] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896920, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.609246] env[69992]: DEBUG oslo_vmware.api [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2896918, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.208855} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.609577] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 964.609838] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 964.610039] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 964.610591] env[69992]: INFO nova.compute.manager [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Took 1.14 seconds to destroy the instance on the hypervisor. [ 964.610889] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 964.611104] env[69992]: DEBUG nova.compute.manager [-] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 964.611196] env[69992]: DEBUG nova.network.neutron [-] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 964.841146] env[69992]: DEBUG nova.scheduler.client.report [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 964.845931] env[69992]: DEBUG nova.compute.manager [req-2a88149e-5f77-4631-8aec-0825f6d79f0a req-e69dd747-dbf0-45ba-949c-bb3682e2bf53 service nova] [instance: e0b5ad16-f631-444c-a189-167e34574316] Received event network-changed-cc179b5e-5d8b-49eb-99ea-6adcb9e0af27 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 964.846155] env[69992]: DEBUG nova.compute.manager [req-2a88149e-5f77-4631-8aec-0825f6d79f0a req-e69dd747-dbf0-45ba-949c-bb3682e2bf53 service nova] [instance: e0b5ad16-f631-444c-a189-167e34574316] Refreshing instance network info cache due to event network-changed-cc179b5e-5d8b-49eb-99ea-6adcb9e0af27. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 964.846385] env[69992]: DEBUG oslo_concurrency.lockutils [req-2a88149e-5f77-4631-8aec-0825f6d79f0a req-e69dd747-dbf0-45ba-949c-bb3682e2bf53 service nova] Acquiring lock "refresh_cache-e0b5ad16-f631-444c-a189-167e34574316" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.846562] env[69992]: DEBUG oslo_concurrency.lockutils [req-2a88149e-5f77-4631-8aec-0825f6d79f0a req-e69dd747-dbf0-45ba-949c-bb3682e2bf53 service nova] Acquired lock "refresh_cache-e0b5ad16-f631-444c-a189-167e34574316" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 964.846688] env[69992]: DEBUG nova.network.neutron [req-2a88149e-5f77-4631-8aec-0825f6d79f0a req-e69dd747-dbf0-45ba-949c-bb3682e2bf53 service nova] [instance: e0b5ad16-f631-444c-a189-167e34574316] Refreshing network info cache for port cc179b5e-5d8b-49eb-99ea-6adcb9e0af27 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 964.878428] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896919, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.898328] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528b2271-f3d0-974d-7e38-37d8f7219c09, 'name': SearchDatastore_Task, 'duration_secs': 0.013786} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.898493] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de3bd760-069e-4a11-8d06-4ec782e5a378 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.911230] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 964.911230] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52e8750e-7cad-eabb-c3a7-b2ee4c64687c" [ 964.911230] env[69992]: _type = "Task" [ 964.911230] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.919819] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e8750e-7cad-eabb-c3a7-b2ee4c64687c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.988221] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896920, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.085530] env[69992]: DEBUG nova.compute.manager [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 965.122427] env[69992]: DEBUG nova.virt.hardware [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 965.122744] env[69992]: DEBUG nova.virt.hardware [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 965.122902] env[69992]: DEBUG nova.virt.hardware [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 965.123094] env[69992]: DEBUG nova.virt.hardware [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 965.123236] env[69992]: DEBUG nova.virt.hardware [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 965.123376] env[69992]: DEBUG nova.virt.hardware [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 965.123600] env[69992]: DEBUG nova.virt.hardware [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 965.123757] env[69992]: DEBUG nova.virt.hardware [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 965.123914] env[69992]: DEBUG nova.virt.hardware [None 
req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 965.124082] env[69992]: DEBUG nova.virt.hardware [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 965.124270] env[69992]: DEBUG nova.virt.hardware [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 965.126110] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423f310a-6ee9-4d2e-97cf-0ca7ac4632b3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.137317] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4630aa7b-cd35-4e48-9c8e-2a849585e8f5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.157095] env[69992]: DEBUG nova.network.neutron [req-7a0bc6e3-cb04-4a65-b036-16d65b08969f req-d27fcf7c-4cff-4bec-9991-b88dfc5c3b4f service nova] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Updated VIF entry in instance network info cache for port 9f2a98a5-6cca-48ee-84a6-66bf08b7e92f. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 965.157447] env[69992]: DEBUG nova.network.neutron [req-7a0bc6e3-cb04-4a65-b036-16d65b08969f req-d27fcf7c-4cff-4bec-9991-b88dfc5c3b4f service nova] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Updating instance_info_cache with network_info: [{"id": "9f2a98a5-6cca-48ee-84a6-66bf08b7e92f", "address": "fa:16:3e:c1:93:aa", "network": {"id": "6427034b-cb68-41ff-8426-d7ce876af837", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-938441390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3fc8205ec2e14fdba28998521b552a69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f2a98a5-6c", "ovs_interfaceid": "9f2a98a5-6cca-48ee-84a6-66bf08b7e92f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.349876] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.285s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.354397] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.800s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 965.354739] env[69992]: DEBUG nova.objects.instance [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lazy-loading 'resources' on Instance uuid bf75484e-4020-48f7-9419-bd88d0462b90 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 965.376070] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896919, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.610207} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.376407] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] bcb5131c-b2c6-4971-8a2e-4fcd7133442d/bcb5131c-b2c6-4971-8a2e-4fcd7133442d.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 965.376687] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 965.377030] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ba20dcc-dab3-41c4-9c50-d4b255a162b0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.388305] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 965.388305] env[69992]: value = "task-2896921" [ 965.388305] env[69992]: _type = "Task" [ 965.388305] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.399661] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896921, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.422691] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e8750e-7cad-eabb-c3a7-b2ee4c64687c, 'name': SearchDatastore_Task, 'duration_secs': 0.029119} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.423815] env[69992]: DEBUG oslo_concurrency.lockutils [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.423815] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] a8813822-f77b-4b73-a6dc-e0eab83b0402/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk. 
{{(pid=69992) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 965.423957] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-629d0ede-7390-4d1c-bb60-fa07c0ccac0b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.435791] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 965.435791] env[69992]: value = "task-2896922" [ 965.435791] env[69992]: _type = "Task" [ 965.435791] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.446701] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896922, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.460325] env[69992]: DEBUG nova.network.neutron [-] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.466624] env[69992]: INFO nova.scheduler.client.report [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Deleted allocations for instance 00b2fd0b-7841-448d-82cf-436aa8d80cda [ 965.486438] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896920, 'name': CreateVM_Task, 'duration_secs': 0.603968} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.486760] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 965.487597] env[69992]: DEBUG oslo_concurrency.lockutils [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.487956] env[69992]: DEBUG oslo_concurrency.lockutils [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.488395] env[69992]: DEBUG oslo_concurrency.lockutils [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 965.488771] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7edaa7b3-4192-44f7-9b4b-f4e57099497b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.494694] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 965.494694] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5295b175-af6b-be9b-7206-27566347a43a" [ 965.494694] env[69992]: _type = "Task" [ 965.494694] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.506628] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5295b175-af6b-be9b-7206-27566347a43a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.608015] env[69992]: DEBUG nova.network.neutron [req-2a88149e-5f77-4631-8aec-0825f6d79f0a req-e69dd747-dbf0-45ba-949c-bb3682e2bf53 service nova] [instance: e0b5ad16-f631-444c-a189-167e34574316] Updated VIF entry in instance network info cache for port cc179b5e-5d8b-49eb-99ea-6adcb9e0af27. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 965.608479] env[69992]: DEBUG nova.network.neutron [req-2a88149e-5f77-4631-8aec-0825f6d79f0a req-e69dd747-dbf0-45ba-949c-bb3682e2bf53 service nova] [instance: e0b5ad16-f631-444c-a189-167e34574316] Updating instance_info_cache with network_info: [{"id": "cc179b5e-5d8b-49eb-99ea-6adcb9e0af27", "address": "fa:16:3e:4d:41:71", "network": {"id": "b7226efb-d53a-4612-877c-74d4b647e592", "bridge": "br-int", "label": "tempest-ServersTestJSON-119651800-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bfe77f891464a79af99097ad5ac32da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bff6c3a1-cc80-46ca-86c0-6dbb029edddb", "external-id": "nsx-vlan-transportzone-223", "segmentation_id": 223, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc179b5e-5d", "ovs_interfaceid": "cc179b5e-5d8b-49eb-99ea-6adcb9e0af27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.662446] env[69992]: DEBUG oslo_concurrency.lockutils [req-7a0bc6e3-cb04-4a65-b036-16d65b08969f req-d27fcf7c-4cff-4bec-9991-b88dfc5c3b4f service nova] Releasing lock "refresh_cache-97cb6372-3f4e-427d-9509-7e6c43aa2e7b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.905651] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896921, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081032} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.909133] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 965.910692] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6399cb72-754f-465b-a7a9-360e563e0095 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.944497] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] bcb5131c-b2c6-4971-8a2e-4fcd7133442d/bcb5131c-b2c6-4971-8a2e-4fcd7133442d.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 965.947922] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-760f7d22-f660-4eff-8d09-fb6b755803de {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.970254] env[69992]: INFO nova.compute.manager [-] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Took 1.36 seconds to deallocate network for instance. [ 965.996485] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896922, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.996926] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 965.996926] env[69992]: value = "task-2896923" [ 965.996926] env[69992]: _type = "Task" [ 965.996926] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.998881] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9c499cb9-e9d1-4616-a413-58ea9babc19d tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "00b2fd0b-7841-448d-82cf-436aa8d80cda" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.416s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.010935] env[69992]: DEBUG oslo_vmware.rw_handles [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52645bb6-3dca-8684-76ba-28970ea98ea9/disk-0.vmdk. 
{{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 966.011919] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a3e1e7-56af-44a1-9bb3-15e4f923089c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.025021] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896923, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.025355] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5295b175-af6b-be9b-7206-27566347a43a, 'name': SearchDatastore_Task, 'duration_secs': 0.01208} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.026290] env[69992]: DEBUG oslo_concurrency.lockutils [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 966.026549] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 966.026874] env[69992]: DEBUG oslo_concurrency.lockutils [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.027065] env[69992]: DEBUG oslo_concurrency.lockutils [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.027251] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 966.027514] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee95e0a3-48d4-4e22-ae26-e34986af7bb4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.031343] env[69992]: DEBUG oslo_vmware.rw_handles [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf 
tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52645bb6-3dca-8684-76ba-28970ea98ea9/disk-0.vmdk is in state: ready. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 966.031463] env[69992]: ERROR oslo_vmware.rw_handles [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52645bb6-3dca-8684-76ba-28970ea98ea9/disk-0.vmdk due to incomplete transfer. [ 966.032122] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-efe9d56c-24a7-44ad-83bc-d4faf1a92e3e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.045020] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 966.045020] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 966.045020] env[69992]: DEBUG oslo_vmware.rw_handles [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52645bb6-3dca-8684-76ba-28970ea98ea9/disk-0.vmdk. 
{{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 966.045020] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Uploaded image 3038820b-fafa-4d89-b2d1-bbbc8ab32242 to the Glance image server {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 966.047368] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Destroying the VM {{(pid=69992) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 966.047894] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-735fb5a3-df39-4c45-8e89-59bb04b6471f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.051743] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b2a4efba-7cea-4655-88fb-ecc86f9b85f2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.060031] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 966.060031] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52436ec6-45b7-6506-5d14-7b01091c6216" [ 966.060031] env[69992]: _type = "Task" [ 966.060031] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.061791] env[69992]: DEBUG oslo_vmware.api [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 966.061791] env[69992]: value = "task-2896924" [ 966.061791] env[69992]: _type = "Task" [ 966.061791] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.071625] env[69992]: DEBUG nova.compute.manager [req-bdd45cd5-ce01-42db-a7ba-93e1a3c66d0f req-5227cafc-dabe-440b-870a-9c58071e3bd8 service nova] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Received event network-vif-plugged-b39fa912-b02a-4764-8cc8-f79e08d575c6 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 966.071920] env[69992]: DEBUG oslo_concurrency.lockutils [req-bdd45cd5-ce01-42db-a7ba-93e1a3c66d0f req-5227cafc-dabe-440b-870a-9c58071e3bd8 service nova] Acquiring lock "a7f01cd7-f148-48fc-a71a-5461672d6039-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.072162] env[69992]: DEBUG oslo_concurrency.lockutils [req-bdd45cd5-ce01-42db-a7ba-93e1a3c66d0f req-5227cafc-dabe-440b-870a-9c58071e3bd8 service nova] Lock "a7f01cd7-f148-48fc-a71a-5461672d6039-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.072562] env[69992]: DEBUG oslo_concurrency.lockutils [req-bdd45cd5-ce01-42db-a7ba-93e1a3c66d0f req-5227cafc-dabe-440b-870a-9c58071e3bd8 service nova] Lock "a7f01cd7-f148-48fc-a71a-5461672d6039-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.072562] env[69992]: DEBUG nova.compute.manager [req-bdd45cd5-ce01-42db-a7ba-93e1a3c66d0f req-5227cafc-dabe-440b-870a-9c58071e3bd8 service nova] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] No waiting events found dispatching network-vif-plugged-b39fa912-b02a-4764-8cc8-f79e08d575c6 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 966.072674] env[69992]: WARNING nova.compute.manager [req-bdd45cd5-ce01-42db-a7ba-93e1a3c66d0f req-5227cafc-dabe-440b-870a-9c58071e3bd8 service nova] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Received unexpected event network-vif-plugged-b39fa912-b02a-4764-8cc8-f79e08d575c6 for instance with vm_state building and task_state spawning. [ 966.079482] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52436ec6-45b7-6506-5d14-7b01091c6216, 'name': SearchDatastore_Task, 'duration_secs': 0.011791} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.080501] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ee8c4f9-dd17-4350-9e22-ca400c38f503 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.087895] env[69992]: DEBUG oslo_vmware.api [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896924, 'name': Destroy_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.092675] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 966.092675] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52233eec-44a9-bd28-79e5-54a1d21b2087" [ 966.092675] env[69992]: _type = "Task" [ 966.092675] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.105867] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52233eec-44a9-bd28-79e5-54a1d21b2087, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.111891] env[69992]: DEBUG oslo_concurrency.lockutils [req-2a88149e-5f77-4631-8aec-0825f6d79f0a req-e69dd747-dbf0-45ba-949c-bb3682e2bf53 service nova] Releasing lock "refresh_cache-e0b5ad16-f631-444c-a189-167e34574316" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 966.198583] env[69992]: DEBUG nova.network.neutron [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Successfully updated port: b39fa912-b02a-4764-8cc8-f79e08d575c6 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 966.457595] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896922, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.573411} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.457881] env[69992]: INFO nova.virt.vmwareapi.ds_util [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] a8813822-f77b-4b73-a6dc-e0eab83b0402/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk. 
[ 966.458668] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9192bce4-23ea-449e-a21d-2449b3daadae {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.488248] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] a8813822-f77b-4b73-a6dc-e0eab83b0402/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 966.491708] env[69992]: DEBUG oslo_concurrency.lockutils [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.491962] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8bbca62-b205-4a85-a80d-577ba21ddccd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.515168] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896923, 'name': ReconfigVM_Task, 'duration_secs': 0.448918} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.518619] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Reconfigured VM instance instance-00000022 to attach disk [datastore1] bcb5131c-b2c6-4971-8a2e-4fcd7133442d/bcb5131c-b2c6-4971-8a2e-4fcd7133442d.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 966.519308] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 966.519308] env[69992]: value = "task-2896925" [ 966.519308] env[69992]: _type = "Task" [ 966.519308] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.520475] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0c9d1466-17b5-4197-b4dd-7b0703763dd5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.530332] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896925, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.534493] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 966.534493] env[69992]: value = "task-2896926" [ 966.534493] env[69992]: _type = "Task" [ 966.534493] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.542999] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896926, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.544883] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f893a2-2e03-43e7-997d-4e913770dfde {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.553022] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd375e35-4842-4079-9c53-246d962ee3b4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.587777] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d683b1bf-df28-4212-a421-503ee6fc0529 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.602671] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e2c328-f417-4679-974a-8bbce5c3dab2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.606825] env[69992]: DEBUG oslo_vmware.api [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896924, 'name': Destroy_Task, 'duration_secs': 0.436333} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.607478] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Destroyed the VM [ 966.607768] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Deleting Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 966.608638] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-017795e8-f36e-43d5-bc0a-06853faa05e1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.622941] env[69992]: DEBUG nova.compute.provider_tree [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 966.624926] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52233eec-44a9-bd28-79e5-54a1d21b2087, 'name': SearchDatastore_Task, 'duration_secs': 0.014701} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.626691] env[69992]: DEBUG oslo_concurrency.lockutils [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 966.627086] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 97cb6372-3f4e-427d-9509-7e6c43aa2e7b/97cb6372-3f4e-427d-9509-7e6c43aa2e7b.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 966.627431] env[69992]: DEBUG oslo_vmware.api [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 966.627431] env[69992]: value = "task-2896927" [ 966.627431] env[69992]: _type = "Task" [ 966.627431] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.627642] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db536e6b-5504-4e63-9b92-5b07b606b16c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.640668] env[69992]: DEBUG oslo_vmware.api [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896927, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.642064] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 966.642064] env[69992]: value = "task-2896928" [ 966.642064] env[69992]: _type = "Task" [ 966.642064] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.651879] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896928, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.703132] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "refresh_cache-a7f01cd7-f148-48fc-a71a-5461672d6039" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.703132] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired lock "refresh_cache-a7f01cd7-f148-48fc-a71a-5461672d6039" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.703132] env[69992]: DEBUG nova.network.neutron [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 966.874567] env[69992]: DEBUG nova.compute.manager [req-d0b1947d-022e-4982-ba7b-6074808af2ce req-87c95273-45c0-4555-a7b5-4643240b714c service nova] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Received event network-vif-deleted-1d0c36a5-d435-4818-94a5-17eca575ea26 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 967.032628] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896925, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.044739] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896926, 'name': Rename_Task, 'duration_secs': 0.219267} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.045047] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 967.045323] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9b32d2e7-12f5-408f-a7b1-a02380a9153e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.053622] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 967.053622] env[69992]: value = "task-2896929" [ 967.053622] env[69992]: _type = "Task" [ 967.053622] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.064032] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896929, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.128649] env[69992]: DEBUG nova.scheduler.client.report [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 967.143107] env[69992]: DEBUG oslo_vmware.api [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896927, 'name': RemoveSnapshot_Task} progress is 12%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.153574] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896928, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487274} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.153830] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 97cb6372-3f4e-427d-9509-7e6c43aa2e7b/97cb6372-3f4e-427d-9509-7e6c43aa2e7b.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 967.154051] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 967.154308] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b8ceb10f-0a88-4787-a47e-6cd08b5b9e71 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.162331] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 967.162331] env[69992]: value = "task-2896930" [ 967.162331] env[69992]: _type = "Task" [ 967.162331] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.170181] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896930, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.236114] env[69992]: DEBUG nova.network.neutron [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 967.421979] env[69992]: DEBUG nova.network.neutron [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Updating instance_info_cache with network_info: [{"id": "b39fa912-b02a-4764-8cc8-f79e08d575c6", "address": "fa:16:3e:02:93:e2", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb39fa912-b0", "ovs_interfaceid": "b39fa912-b02a-4764-8cc8-f79e08d575c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.532125] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896925, 'name': ReconfigVM_Task, 'duration_secs': 0.54943} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.532477] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Reconfigured VM instance instance-00000020 to attach disk [datastore1] a8813822-f77b-4b73-a6dc-e0eab83b0402/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 967.533335] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d0ca29a-756b-4657-ad6f-da2372fbd9b0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.563836] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7278617-4f84-4d34-9975-939c39a9817a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.580774] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896929, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.582234] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 967.582234] env[69992]: value = "task-2896931" [ 967.582234] env[69992]: _type = "Task" [ 967.582234] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.590592] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896931, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.637136] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.283s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.639361] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.290s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.639588] env[69992]: DEBUG nova.objects.instance [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Lazy-loading 'resources' on Instance uuid 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 967.646840] env[69992]: DEBUG oslo_vmware.api [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896927, 'name': RemoveSnapshot_Task, 'duration_secs': 0.709829} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.647627] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Deleted Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 967.648136] env[69992]: INFO nova.compute.manager [None req-a3f8d7bf-b97e-45e9-ad92-319e0e5e6bbf tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Took 19.14 seconds to snapshot the instance on the hypervisor. 
[ 967.662507] env[69992]: INFO nova.scheduler.client.report [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Deleted allocations for instance bf75484e-4020-48f7-9419-bd88d0462b90 [ 967.676643] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896930, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080411} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.676926] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 967.677767] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-895ab9e1-fea9-41e7-be7c-7d072e232436 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.701687] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] 97cb6372-3f4e-427d-9509-7e6c43aa2e7b/97cb6372-3f4e-427d-9509-7e6c43aa2e7b.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 967.702526] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c39d6711-ae9e-4500-a24e-394b7e033f16 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.724540] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 967.724540] env[69992]: value = "task-2896932" [ 967.724540] env[69992]: _type = "Task" [ 967.724540] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.737771] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896932, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.925670] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Releasing lock "refresh_cache-a7f01cd7-f148-48fc-a71a-5461672d6039" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.926200] env[69992]: DEBUG nova.compute.manager [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Instance network_info: |[{"id": "b39fa912-b02a-4764-8cc8-f79e08d575c6", "address": "fa:16:3e:02:93:e2", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb39fa912-b0", "ovs_interfaceid": "b39fa912-b02a-4764-8cc8-f79e08d575c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 967.926509] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:93:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f6d1427-d86b-4371-9172-50e4bb0eb1cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b39fa912-b02a-4764-8cc8-f79e08d575c6', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 967.934016] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Creating folder: Project (ca458056b0794b08b812f0a4106a448c). Parent ref: group-v581821. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 967.934311] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66f40ada-b618-4280-ab98-9ea27b18d02e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.954112] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Created folder: Project (ca458056b0794b08b812f0a4106a448c) in parent group-v581821. [ 967.954337] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Creating folder: Instances. Parent ref: group-v581928. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 967.954580] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b77a2174-cee4-4314-a249-782febc65281 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.964548] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Created folder: Instances in parent group-v581928. [ 967.964782] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 967.964969] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 967.965203] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-343ed738-97fe-4c5c-a878-de0b115e037e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.984752] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 967.984752] env[69992]: value = "task-2896935" [ 967.984752] env[69992]: _type = "Task" [ 967.984752] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.994954] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896935, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.065558] env[69992]: DEBUG oslo_vmware.api [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2896929, 'name': PowerOnVM_Task, 'duration_secs': 0.546145} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.065832] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 968.066160] env[69992]: INFO nova.compute.manager [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Took 8.65 seconds to spawn the instance on the hypervisor. [ 968.066458] env[69992]: DEBUG nova.compute.manager [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 968.067232] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-135cc2cf-eb28-420c-b173-f18b8fdee7ec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.091972] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896931, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.173513] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8f9ddc7a-d47a-47ed-a22c-4b90d407857a tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "bf75484e-4020-48f7-9419-bd88d0462b90" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.345s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.174693] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a6a43447-1eaf-4461-a987-1b83fc045d89 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "bf75484e-4020-48f7-9419-bd88d0462b90" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 27.215s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.175019] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a6a43447-1eaf-4461-a987-1b83fc045d89 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquiring lock "bf75484e-4020-48f7-9419-bd88d0462b90-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.175882] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a6a43447-1eaf-4461-a987-1b83fc045d89 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "bf75484e-4020-48f7-9419-bd88d0462b90-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.176184] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a6a43447-1eaf-4461-a987-1b83fc045d89 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "bf75484e-4020-48f7-9419-bd88d0462b90-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.179906] env[69992]: INFO nova.compute.manager [None req-a6a43447-1eaf-4461-a987-1b83fc045d89 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Terminating instance [ 968.237265] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896932, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.285358] env[69992]: DEBUG nova.compute.manager [req-12a5fbdc-96a5-4c8b-a11a-6e0c330452e6 req-fccaaf72-68bc-49be-b088-dfcbe4f81995 service nova] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Received event network-changed-b39fa912-b02a-4764-8cc8-f79e08d575c6 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 968.285584] env[69992]: DEBUG nova.compute.manager [req-12a5fbdc-96a5-4c8b-a11a-6e0c330452e6 req-fccaaf72-68bc-49be-b088-dfcbe4f81995 service nova] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Refreshing instance network info cache due to event network-changed-b39fa912-b02a-4764-8cc8-f79e08d575c6. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 968.285791] env[69992]: DEBUG oslo_concurrency.lockutils [req-12a5fbdc-96a5-4c8b-a11a-6e0c330452e6 req-fccaaf72-68bc-49be-b088-dfcbe4f81995 service nova] Acquiring lock "refresh_cache-a7f01cd7-f148-48fc-a71a-5461672d6039" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.285937] env[69992]: DEBUG oslo_concurrency.lockutils [req-12a5fbdc-96a5-4c8b-a11a-6e0c330452e6 req-fccaaf72-68bc-49be-b088-dfcbe4f81995 service nova] Acquired lock "refresh_cache-a7f01cd7-f148-48fc-a71a-5461672d6039" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.286133] env[69992]: DEBUG nova.network.neutron [req-12a5fbdc-96a5-4c8b-a11a-6e0c330452e6 req-fccaaf72-68bc-49be-b088-dfcbe4f81995 service nova] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Refreshing network info cache for port b39fa912-b02a-4764-8cc8-f79e08d575c6 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 968.496555] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896935, 'name': CreateVM_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.590244] env[69992]: INFO nova.compute.manager [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Took 35.86 seconds to build instance. 
[ 968.595866] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896931, 'name': ReconfigVM_Task, 'duration_secs': 0.573829} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.596184] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 968.596491] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e33e5a5e-91e9-4982-9dfb-70133b3642bf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.605125] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 968.605125] env[69992]: value = "task-2896936" [ 968.605125] env[69992]: _type = "Task" [ 968.605125] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.614464] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896936, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.668912] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe8e609-f01e-4413-8c0d-b6db98b58e9a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.676873] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7897f9aa-34c6-4cfc-9c5f-a5b35b8d39be {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.683642] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a6a43447-1eaf-4461-a987-1b83fc045d89 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquiring lock "refresh_cache-bf75484e-4020-48f7-9419-bd88d0462b90" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.683930] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a6a43447-1eaf-4461-a987-1b83fc045d89 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquired lock "refresh_cache-bf75484e-4020-48f7-9419-bd88d0462b90" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.684876] env[69992]: DEBUG nova.network.neutron [None req-a6a43447-1eaf-4461-a987-1b83fc045d89 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 968.712765] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d4a434-73d5-4c05-a3e7-f6effddac655 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.721584] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e061945-6620-4eb3-9700-aed49b5bb1ad {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.740115] env[69992]: DEBUG nova.compute.provider_tree [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 968.749214] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896932, 'name': ReconfigVM_Task, 'duration_secs': 1.023901} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.750229] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Reconfigured VM instance instance-00000023 to attach disk [datastore1] 97cb6372-3f4e-427d-9509-7e6c43aa2e7b/97cb6372-3f4e-427d-9509-7e6c43aa2e7b.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 968.751019] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8eaa1530-5801-450b-9dae-47a07d25566f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.760309] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 968.760309] env[69992]: value = "task-2896937" [ 968.760309] env[69992]: _type = "Task" [ 968.760309] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.770525] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896937, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.900819] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquiring lock "ee4c0f2b-44cb-4b37-8e4a-5706b9932144" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.900819] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "ee4c0f2b-44cb-4b37-8e4a-5706b9932144" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.901093] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquiring lock "ee4c0f2b-44cb-4b37-8e4a-5706b9932144-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.901171] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "ee4c0f2b-44cb-4b37-8e4a-5706b9932144-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.901359] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "ee4c0f2b-44cb-4b37-8e4a-5706b9932144-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.903761] env[69992]: INFO nova.compute.manager [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Terminating instance [ 968.999181] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896935, 'name': CreateVM_Task, 'duration_secs': 0.795735} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.999398] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 969.000273] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.000547] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.001027] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 969.001413] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ad96618-f06f-4e2a-96eb-d0ebd51326fb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.008388] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 969.008388] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52960b0e-5800-1e64-b6d7-8b33d877c7be" [ 969.008388] env[69992]: _type = "Task" [ 969.008388] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.020546] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52960b0e-5800-1e64-b6d7-8b33d877c7be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.074374] env[69992]: DEBUG nova.network.neutron [req-12a5fbdc-96a5-4c8b-a11a-6e0c330452e6 req-fccaaf72-68bc-49be-b088-dfcbe4f81995 service nova] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Updated VIF entry in instance network info cache for port b39fa912-b02a-4764-8cc8-f79e08d575c6. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 969.074805] env[69992]: DEBUG nova.network.neutron [req-12a5fbdc-96a5-4c8b-a11a-6e0c330452e6 req-fccaaf72-68bc-49be-b088-dfcbe4f81995 service nova] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Updating instance_info_cache with network_info: [{"id": "b39fa912-b02a-4764-8cc8-f79e08d575c6", "address": "fa:16:3e:02:93:e2", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb39fa912-b0", "ovs_interfaceid": "b39fa912-b02a-4764-8cc8-f79e08d575c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.092651] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f9584699-a182-4fe8-a161-07ef328c4623 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "bcb5131c-b2c6-4971-8a2e-4fcd7133442d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.136s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.116652] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896936, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.211956] env[69992]: DEBUG nova.compute.utils [None req-a6a43447-1eaf-4461-a987-1b83fc045d89 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Can not refresh info_cache because instance was not found {{(pid=69992) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1056}} [ 969.241529] env[69992]: DEBUG nova.network.neutron [None req-a6a43447-1eaf-4461-a987-1b83fc045d89 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 969.244320] env[69992]: DEBUG nova.scheduler.client.report [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 969.274300] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896937, 'name': Rename_Task, 'duration_secs': 0.285821} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.274688] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 969.274959] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0129ffc3-28f0-4ed5-b0c4-da2e1b14ecec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.282737] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 969.282737] env[69992]: value = "task-2896938" [ 969.282737] env[69992]: _type = "Task" [ 969.282737] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.292212] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896938, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.386506] env[69992]: DEBUG nova.network.neutron [None req-a6a43447-1eaf-4461-a987-1b83fc045d89 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.408722] env[69992]: DEBUG nova.compute.manager [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 969.408722] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 969.410378] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ddb5ac-b82b-4af9-93f3-f34bb4970f79 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.421100] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 969.421401] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78d04565-b1dd-4bad-8744-8f1f637c2a8b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.431455] env[69992]: DEBUG oslo_vmware.api [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for the task: (returnval){ [ 969.431455] env[69992]: value = "task-2896939" [ 969.431455] env[69992]: _type = "Task" [ 969.431455] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.441700] env[69992]: DEBUG oslo_vmware.api [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896939, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.520318] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52960b0e-5800-1e64-b6d7-8b33d877c7be, 'name': SearchDatastore_Task, 'duration_secs': 0.013443} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.520659] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 969.520896] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 969.521151] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.521299] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.521479] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 969.521755] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8bd025fa-1ca2-4ed4-b984-3b2ab17e1b81 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.533745] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 969.533938] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 969.534700] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ace215c-81ce-4c19-ae07-ba9228a22e6b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.541896] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 969.541896] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d78656-6554-5ae1-db77-7ea72667411c" [ 969.541896] env[69992]: _type = "Task" [ 969.541896] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.551054] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d78656-6554-5ae1-db77-7ea72667411c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.577838] env[69992]: DEBUG oslo_concurrency.lockutils [req-12a5fbdc-96a5-4c8b-a11a-6e0c330452e6 req-fccaaf72-68bc-49be-b088-dfcbe4f81995 service nova] Releasing lock "refresh_cache-a7f01cd7-f148-48fc-a71a-5461672d6039" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 969.595936] env[69992]: DEBUG nova.compute.manager [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 969.616009] env[69992]: DEBUG oslo_vmware.api [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896936, 'name': PowerOnVM_Task, 'duration_secs': 0.681572} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.616405] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 969.619643] env[69992]: DEBUG nova.compute.manager [None req-25811703-555a-4121-9c3b-29c982180e21 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 969.620586] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817594fb-64f3-4bbd-a8e0-87fe8859e429 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.749881] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.110s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.753686] env[69992]: DEBUG oslo_concurrency.lockutils [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.311s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.753686] env[69992]: DEBUG nova.objects.instance [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Lazy-loading 'resources' on Instance uuid ab3df643-58db-45b7-a572-9c040135989d {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 969.773766] env[69992]: INFO nova.scheduler.client.report [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Deleted allocations for instance 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2 [ 969.799170] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896938, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.889658] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a6a43447-1eaf-4461-a987-1b83fc045d89 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Releasing lock "refresh_cache-bf75484e-4020-48f7-9419-bd88d0462b90" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 969.890791] env[69992]: DEBUG nova.compute.manager [None req-a6a43447-1eaf-4461-a987-1b83fc045d89 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 969.890791] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a6a43447-1eaf-4461-a987-1b83fc045d89 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 969.891040] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6798fd56-522b-4e42-a438-f1bd338922b1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.902273] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb7b383-2501-4429-a384-dc8b5c905151 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.949531] env[69992]: WARNING nova.virt.vmwareapi.vmops [None req-a6a43447-1eaf-4461-a987-1b83fc045d89 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bf75484e-4020-48f7-9419-bd88d0462b90 could not be found. [ 969.949531] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a6a43447-1eaf-4461-a987-1b83fc045d89 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 969.949657] env[69992]: INFO nova.compute.manager [None req-a6a43447-1eaf-4461-a987-1b83fc045d89 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Took 0.06 seconds to destroy the instance on the hypervisor. [ 969.949929] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a6a43447-1eaf-4461-a987-1b83fc045d89 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 969.953216] env[69992]: DEBUG nova.compute.manager [-] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 969.953454] env[69992]: DEBUG nova.network.neutron [-] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 969.961119] env[69992]: DEBUG oslo_vmware.api [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896939, 'name': PowerOffVM_Task, 'duration_secs': 0.217079} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.961362] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 969.961524] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 969.961765] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-333bf107-cbc9-467c-971e-7a5e9943445a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.979654] env[69992]: DEBUG nova.network.neutron [-] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 970.037808] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 970.038057] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 970.038234] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Deleting the datastore file [datastore1] ee4c0f2b-44cb-4b37-8e4a-5706b9932144 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 970.038499] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b1223cbe-3df7-439c-8e88-e70e401fd69b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.051068] env[69992]: DEBUG oslo_vmware.api [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for the task: (returnval){ [ 970.051068] env[69992]: value = "task-2896941" [ 970.051068] env[69992]: _type = "Task" [ 970.051068] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.060471] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d78656-6554-5ae1-db77-7ea72667411c, 'name': SearchDatastore_Task, 'duration_secs': 0.014416} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.061723] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d19d85ed-63cf-4357-ad38-136d6be4e2db {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.068456] env[69992]: DEBUG oslo_vmware.api [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896941, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.073173] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 970.073173] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]524aca78-a96e-16e2-7373-f3dff5cf045d" [ 970.073173] env[69992]: _type = "Task" [ 970.073173] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.083199] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524aca78-a96e-16e2-7373-f3dff5cf045d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.119731] env[69992]: DEBUG oslo_concurrency.lockutils [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.140744] env[69992]: DEBUG nova.compute.manager [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 970.141199] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d0c1c2-c690-4ab2-ae96-2762477f16bb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.283058] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aad64737-e480-452d-9a7e-c980bdee1174 tempest-TenantUsagesTestJSON-1178551802 tempest-TenantUsagesTestJSON-1178551802-project-member] Lock "1f9d0558-63fb-4a6f-a2d2-dd7a334249a2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.047s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.293626] env[69992]: DEBUG oslo_vmware.api [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896938, 'name': PowerOnVM_Task, 'duration_secs': 0.575838} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.293887] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 970.294197] env[69992]: INFO nova.compute.manager [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Took 8.03 seconds to spawn the instance on the hypervisor. 
[ 970.294460] env[69992]: DEBUG nova.compute.manager [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 970.295411] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a1dd71c-3143-452a-a7a1-e7f2a2137621 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.483583] env[69992]: DEBUG nova.network.neutron [-] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.548090] env[69992]: DEBUG oslo_concurrency.lockutils [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "673be00f-e3c5-4a54-beeb-cf89828e9e32" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.548090] env[69992]: DEBUG oslo_concurrency.lockutils [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "673be00f-e3c5-4a54-beeb-cf89828e9e32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.565519] env[69992]: DEBUG oslo_vmware.api [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Task: {'id': task-2896941, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189595} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.565925] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 970.568848] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 970.568848] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 970.568848] env[69992]: INFO nova.compute.manager [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Took 1.16 seconds to destroy the instance on the hypervisor. [ 970.568848] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 970.568848] env[69992]: DEBUG nova.compute.manager [-] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 970.569287] env[69992]: DEBUG nova.network.neutron [-] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 970.591776] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524aca78-a96e-16e2-7373-f3dff5cf045d, 'name': SearchDatastore_Task, 'duration_secs': 0.012393} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.592118] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.592479] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] a7f01cd7-f148-48fc-a71a-5461672d6039/a7f01cd7-f148-48fc-a71a-5461672d6039.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 970.593015] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-44762863-a9d6-4b45-9b3a-3516e8f9ba6a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.606372] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 970.606372] env[69992]: value = "task-2896942" [ 970.606372] env[69992]: _type = "Task" [ 970.606372] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.616875] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2896942, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.654257] env[69992]: INFO nova.compute.manager [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] instance snapshotting [ 970.657473] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e346163-1e2f-4e5c-b14d-570977fac5da {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.685721] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bdb40ca-02d7-4a46-9f5d-283dc2a1417b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.826259] env[69992]: INFO nova.compute.manager [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Took 34.74 seconds to build instance. 
[ 970.939782] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b53a235d-dd94-476b-a7ca-933e9f58aa67 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.949437] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-807bd15b-807d-41b8-9f79-0a3720f15ed6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.984838] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742ef835-9f75-4d67-a500-e34e355245bb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.987906] env[69992]: INFO nova.compute.manager [-] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Took 1.03 seconds to deallocate network for instance. [ 970.997310] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-178360ec-2218-4ff8-9621-28e52fefb161 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.016711] env[69992]: DEBUG nova.compute.provider_tree [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 971.119239] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2896942, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.161719] env[69992]: DEBUG nova.compute.manager [req-b134b1b3-e4fa-4bb3-a519-127af8c84cc2 req-9c9a54f0-3a26-4fdf-8798-b31605619fe2 service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Received event network-vif-deleted-1f86db68-8a81-421c-aa9b-4daab0584c4c {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 971.162053] env[69992]: INFO nova.compute.manager [req-b134b1b3-e4fa-4bb3-a519-127af8c84cc2 req-9c9a54f0-3a26-4fdf-8798-b31605619fe2 service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Neutron deleted interface 1f86db68-8a81-421c-aa9b-4daab0584c4c; detaching it from the instance and deleting it from the info cache [ 971.162381] env[69992]: DEBUG nova.network.neutron [req-b134b1b3-e4fa-4bb3-a519-127af8c84cc2 req-9c9a54f0-3a26-4fdf-8798-b31605619fe2 service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.199398] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Creating Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 971.200317] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-160a2132-26d6-4bcb-919d-f6a10f7f0fe4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.212350] env[69992]: DEBUG oslo_vmware.api [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 971.212350] env[69992]: value = "task-2896943" [ 971.212350] env[69992]: _type = "Task" [ 971.212350] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.224091] env[69992]: DEBUG oslo_vmware.api [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896943, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.329150] env[69992]: DEBUG oslo_concurrency.lockutils [None req-061c8893-c26d-4acd-bb25-05609a003b05 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lock "97cb6372-3f4e-427d-9509-7e6c43aa2e7b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.156s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.497044] env[69992]: INFO nova.compute.manager [None req-a6a43447-1eaf-4461-a987-1b83fc045d89 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Instance disappeared during terminate [ 971.497237] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a6a43447-1eaf-4461-a987-1b83fc045d89 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "bf75484e-4020-48f7-9419-bd88d0462b90" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.323s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.521558] env[69992]: DEBUG nova.scheduler.client.report [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 971.615957] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2896942, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.794729} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.616265] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] a7f01cd7-f148-48fc-a71a-5461672d6039/a7f01cd7-f148-48fc-a71a-5461672d6039.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 971.616441] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 971.616689] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f6c76dc4-eb7c-4043-a1ec-d3ea28ef795f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.626430] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 971.626430] env[69992]: value = "task-2896944" [ 971.626430] env[69992]: _type = "Task" [ 971.626430] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.637413] env[69992]: DEBUG nova.network.neutron [-] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.638727] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2896944, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.667766] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-35c41867-63c8-43ab-a970-64745baa76d7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.678942] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a324749-feb7-47ae-9efc-6fe3cb0bfcfe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.721032] env[69992]: DEBUG nova.compute.manager [req-b134b1b3-e4fa-4bb3-a519-127af8c84cc2 req-9c9a54f0-3a26-4fdf-8798-b31605619fe2 service nova] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Detach interface failed, port_id=1f86db68-8a81-421c-aa9b-4daab0584c4c, reason: Instance ee4c0f2b-44cb-4b37-8e4a-5706b9932144 could not be found. 
{{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 971.734856] env[69992]: DEBUG oslo_vmware.api [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896943, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.835567] env[69992]: DEBUG nova.compute.manager [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 972.026301] env[69992]: DEBUG oslo_concurrency.lockutils [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.274s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.032503] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 31.388s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.056736] env[69992]: INFO nova.scheduler.client.report [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Deleted allocations for instance ab3df643-58db-45b7-a572-9c040135989d [ 972.142632] env[69992]: INFO nova.compute.manager [-] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Took 1.58 seconds to deallocate network for instance. [ 972.143498] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2896944, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07314} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.145299] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 972.148808] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b0d2b27-c63c-46a4-96a1-27e9da3d5c77 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.174377] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Reconfiguring VM instance instance-00000024 to attach disk [datastore2] a7f01cd7-f148-48fc-a71a-5461672d6039/a7f01cd7-f148-48fc-a71a-5461672d6039.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 972.176277] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd5e7a16-a1b1-4754-8bca-24e6a2b051ff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.198432] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 972.198432] env[69992]: value = "task-2896945" [ 972.198432] env[69992]: _type = "Task" [ 972.198432] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.207712] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2896945, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.234049] env[69992]: INFO nova.compute.manager [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Rescuing [ 972.234414] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquiring lock "refresh_cache-97cb6372-3f4e-427d-9509-7e6c43aa2e7b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.234633] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquired lock "refresh_cache-97cb6372-3f4e-427d-9509-7e6c43aa2e7b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 972.234822] env[69992]: DEBUG nova.network.neutron [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 972.236101] env[69992]: DEBUG oslo_vmware.api [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896943, 'name': CreateSnapshot_Task, 'duration_secs': 0.689639} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.236344] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Created Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 972.237142] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50e77e2-5667-4641-a759-e4327ae9e5aa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.250705] env[69992]: INFO nova.compute.manager [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Rescuing [ 972.250980] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "refresh_cache-eec50935-f553-43c7-b67b-7289299745bd" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.251188] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquired lock "refresh_cache-eec50935-f553-43c7-b67b-7289299745bd" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 
972.251356] env[69992]: DEBUG nova.network.neutron [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 972.358891] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.565851] env[69992]: DEBUG oslo_concurrency.lockutils [None req-994f1034-48ae-4bb6-b580-f1492895a37d tempest-ServerGroupTestJSON-1105220050 tempest-ServerGroupTestJSON-1105220050-project-member] Lock "ab3df643-58db-45b7-a572-9c040135989d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.255s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.654693] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.714615] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2896945, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.755434] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Creating linked-clone VM from snapshot {{(pid=69992) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 972.757863] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7670f7bd-8203-4d64-95b7-368ff2cfe3a0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.768936] env[69992]: DEBUG oslo_vmware.api [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 972.768936] env[69992]: value = "task-2896946" [ 972.768936] env[69992]: _type = "Task" [ 972.768936] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.778415] env[69992]: DEBUG oslo_vmware.api [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896946, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.081075] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 973.081815] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance ee4c0f2b-44cb-4b37-8e4a-5706b9932144 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 973.082103] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance e5d9de80-1ee5-462a-8459-168fd60e1972 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 973.082232] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance eba81db1-973c-4981-baca-cb98e4087510 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 973.082365] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance a9274dfc-afbd-419b-a98b-053d71a05d7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 973.082623] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 27580836-7ab5-4e64-a985-3e6fc22a8b77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 973.082623] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 27492ef7-8258-4001-b3b3-5bcb94e12c1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 973.082754] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance a49b4721-e338-4e60-b91e-137caa3c9c03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 973.082946] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 73e41918-88b8-4ff7-9fdd-b45ac97c80ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 973.083112] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 1d436762-964d-40d9-871e-ee33c3ba25b5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 973.083425] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance a29534bf-ee12-4b94-839b-4a12659ebd3b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 973.083580] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 98cd0eb8-d17a-4a9b-a172-1ba1207168d0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 973.083828] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 973.083911] env[69992]: WARNING nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 94a4a16e-926c-47ce-a5a7-0b216b7c5442 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 973.083986] env[69992]: WARNING nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance c205f559-7fe6-4d7e-beba-2fc96b89d705 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 973.084338] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance eec50935-f553-43c7-b67b-7289299745bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 973.084504] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance dd31269e-716c-44cd-9fc3-ce227fe5b3b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 973.084555] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance a8813822-f77b-4b73-a6dc-e0eab83b0402 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 973.084707] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance e0b5ad16-f631-444c-a189-167e34574316 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 973.084831] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance bcb5131c-b2c6-4971-8a2e-4fcd7133442d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 973.084950] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 97cb6372-3f4e-427d-9509-7e6c43aa2e7b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 973.085089] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance a7f01cd7-f148-48fc-a71a-5461672d6039 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 973.220839] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2896945, 'name': ReconfigVM_Task, 'duration_secs': 0.893813} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.220839] env[69992]: DEBUG nova.network.neutron [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Updating instance_info_cache with network_info: [{"id": "9f2a98a5-6cca-48ee-84a6-66bf08b7e92f", "address": "fa:16:3e:c1:93:aa", "network": {"id": "6427034b-cb68-41ff-8426-d7ce876af837", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-938441390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3fc8205ec2e14fdba28998521b552a69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f2a98a5-6c", "ovs_interfaceid": "9f2a98a5-6cca-48ee-84a6-66bf08b7e92f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.221269] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Reconfigured VM instance instance-00000024 to attach disk [datastore2] a7f01cd7-f148-48fc-a71a-5461672d6039/a7f01cd7-f148-48fc-a71a-5461672d6039.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 973.223046] env[69992]: DEBUG nova.network.neutron [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Updating instance_info_cache with network_info: [{"id": "ae0113e0-6fd4-44a9-b496-7e09ffb4539b", "address": "fa:16:3e:4e:44:12", "network": {"id": "e710c9b1-06e9-45f1-88fe-251001c4f54f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1116812669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8217315011854468b0cc17c4dfe342f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae0113e0-6f", "ovs_interfaceid": "ae0113e0-6fd4-44a9-b496-7e09ffb4539b", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.224998] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-425668df-3ddf-4bb9-85a1-a3f8df8f0257 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.237021] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 973.237021] env[69992]: value = "task-2896947" [ 973.237021] env[69992]: _type = "Task" [ 973.237021] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.247711] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2896947, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.281745] env[69992]: DEBUG oslo_vmware.api [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896946, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.590344] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance d361769c-bfc2-4c72-83f4-dc9b51f907a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 973.729018] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Releasing lock "refresh_cache-eec50935-f553-43c7-b67b-7289299745bd" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 973.734245] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Releasing lock "refresh_cache-97cb6372-3f4e-427d-9509-7e6c43aa2e7b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 973.749484] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2896947, 'name': Rename_Task, 'duration_secs': 0.188312} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.749484] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 973.749484] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-757c0f1d-274f-44cd-bb7f-a1894033d1b1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.757645] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 973.757645] env[69992]: value = "task-2896948" [ 973.757645] env[69992]: _type = "Task" [ 973.757645] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.783518] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2896948, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.791706] env[69992]: DEBUG oslo_vmware.api [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896946, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.093711] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 974.275455] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2896948, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.289576] env[69992]: DEBUG oslo_vmware.api [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896946, 'name': CloneVM_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.599070] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 974.770508] env[69992]: DEBUG oslo_vmware.api [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2896948, 'name': PowerOnVM_Task, 'duration_secs': 0.895961} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.771034] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 974.771034] env[69992]: INFO nova.compute.manager [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Took 9.69 seconds to spawn the instance on the hypervisor. [ 974.771238] env[69992]: DEBUG nova.compute.manager [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 974.772099] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f2fa229-13f5-4bd3-8fef-0aad0b8845b8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.792141] env[69992]: DEBUG oslo_vmware.api [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896946, 'name': CloneVM_Task, 'duration_secs': 1.592106} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.797274] env[69992]: INFO nova.virt.vmwareapi.vmops [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Created linked-clone VM from snapshot [ 974.805022] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c9940e-5311-413b-971c-9dc0b29719f0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.813926] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Uploading image 39e743b4-f55e-4237-8936-ced158cc751a {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 974.848090] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 974.848090] env[69992]: value = "vm-581932" [ 974.848090] env[69992]: _type = "VirtualMachine" [ 974.848090] env[69992]: }. 
{{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 974.848688] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-88f525e7-2bdd-4ab5-aa9b-9215e2b8f937 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.857839] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lease: (returnval){ [ 974.857839] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5229a4ec-14b8-6250-6027-bbf88493c62b" [ 974.857839] env[69992]: _type = "HttpNfcLease" [ 974.857839] env[69992]: } obtained for exporting VM: (result){ [ 974.857839] env[69992]: value = "vm-581932" [ 974.857839] env[69992]: _type = "VirtualMachine" [ 974.857839] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 974.858148] env[69992]: DEBUG oslo_vmware.api [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the lease: (returnval){ [ 974.858148] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5229a4ec-14b8-6250-6027-bbf88493c62b" [ 974.858148] env[69992]: _type = "HttpNfcLease" [ 974.858148] env[69992]: } to be ready. {{(pid=69992) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 974.865568] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 974.865568] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5229a4ec-14b8-6250-6027-bbf88493c62b" [ 974.865568] env[69992]: _type = "HttpNfcLease" [ 974.865568] env[69992]: } is initializing. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 975.104464] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 975.225182] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Acquiring lock "efa06ccc-be20-4d0e-938f-01c91ef4de8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.225452] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Lock "efa06ccc-be20-4d0e-938f-01c91ef4de8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.288917] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 975.288917] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce07a20e-531b-41c3-bc84-e1f00e479ec6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.300054] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 975.300054] env[69992]: value = "task-2896950" [ 975.300054] env[69992]: _type = "Task" [ 975.300054] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.303336] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 975.303853] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5d511efa-1b81-45de-b193-5133ba50c771 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.316330] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896950, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.320594] env[69992]: DEBUG oslo_vmware.api [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 975.320594] env[69992]: value = "task-2896951" [ 975.320594] env[69992]: _type = "Task" [ 975.320594] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.321320] env[69992]: INFO nova.compute.manager [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Took 38.22 seconds to build instance. [ 975.334230] env[69992]: DEBUG oslo_vmware.api [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896951, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.368788] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 975.368788] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5229a4ec-14b8-6250-6027-bbf88493c62b" [ 975.368788] env[69992]: _type = "HttpNfcLease" [ 975.368788] env[69992]: } is ready. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 975.370174] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 975.370174] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5229a4ec-14b8-6250-6027-bbf88493c62b" [ 975.370174] env[69992]: _type = "HttpNfcLease" [ 975.370174] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 975.373953] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e828a7e3-1f1a-49a9-9872-6e410f526ba3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.381330] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520a1200-e57c-71f1-5a07-9fccab55ef12/disk-0.vmdk from lease info. {{(pid=69992) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 975.381834] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520a1200-e57c-71f1-5a07-9fccab55ef12/disk-0.vmdk for reading. {{(pid=69992) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 975.503330] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-97ef4438-1445-4eea-b62f-a755b48d7b27 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.608094] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 975.808803] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896950, 'name': PowerOffVM_Task, 'duration_secs': 0.351074} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.809307] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 975.810036] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3102ee5c-f3ef-4598-b568-38bc9b70fd72 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.833381] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c954bdd9-ccf6-4344-9720-b4318d63c4c8 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "a7f01cd7-f148-48fc-a71a-5461672d6039" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.594s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.838674] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecbea2dc-1cc5-4d3c-8de5-2ee56aa652f5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.848052] env[69992]: DEBUG oslo_vmware.api [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896951, 'name': PowerOffVM_Task, 'duration_secs': 0.292668} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.850919] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 975.854008] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af4ad31b-70bb-4f25-85e0-2df2d00ea6fc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.883836] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7914afd9-9f3b-4460-bacf-07890c289e6e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.896745] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 975.897121] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-27782476-9c90-4512-8e94-5f9d857eddb5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.908919] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 975.908919] env[69992]: value = "task-2896952" [ 975.908919] env[69992]: _type = "Task" [ 975.908919] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.922049] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] VM already powered off {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 975.922049] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 975.922049] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.922049] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 975.922049] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 975.922288] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fec3ebd7-c86e-4add-a40a-1877a0b6d01b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.929265] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 975.930039] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d63260a5-9956-4058-b4ac-cf61526a7893 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.934023] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 975.934242] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 975.934997] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bdbf377-90e5-4a97-ae3d-328a6e7d4bb1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.949868] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 975.949868] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]521e3993-5887-cf39-01b5-4d5cbeb3ad0b" [ 975.949868] env[69992]: _type = "Task" [ 975.949868] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.951959] env[69992]: DEBUG oslo_vmware.api [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 975.951959] env[69992]: value = "task-2896953" [ 975.951959] env[69992]: _type = "Task" [ 975.951959] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.966397] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521e3993-5887-cf39-01b5-4d5cbeb3ad0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.970414] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] VM already powered off {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 975.972864] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 975.972864] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.972864] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 975.972864] env[69992]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 975.972864] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95e0ee88-b509-4d63-9659-69941128fbe2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.983414] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 975.983711] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 975.984786] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41f7a3d4-b4ed-4123-b9cb-de5b30df09ca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.995519] env[69992]: DEBUG oslo_vmware.api [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 975.995519] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]523bc3ea-f136-3bcf-8dbd-4044114651a2" [ 975.995519] env[69992]: _type = "Task" [ 975.995519] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.002532] env[69992]: DEBUG nova.compute.manager [req-aee745da-8fe5-4b54-9741-870047784890 req-81029850-825f-4b37-b28c-23a1935597bd service nova] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Received event network-changed-b39fa912-b02a-4764-8cc8-f79e08d575c6 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 976.002836] env[69992]: DEBUG nova.compute.manager [req-aee745da-8fe5-4b54-9741-870047784890 req-81029850-825f-4b37-b28c-23a1935597bd service nova] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Refreshing instance network info cache due to event network-changed-b39fa912-b02a-4764-8cc8-f79e08d575c6. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 976.003649] env[69992]: DEBUG oslo_concurrency.lockutils [req-aee745da-8fe5-4b54-9741-870047784890 req-81029850-825f-4b37-b28c-23a1935597bd service nova] Acquiring lock "refresh_cache-a7f01cd7-f148-48fc-a71a-5461672d6039" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.003649] env[69992]: DEBUG oslo_concurrency.lockutils [req-aee745da-8fe5-4b54-9741-870047784890 req-81029850-825f-4b37-b28c-23a1935597bd service nova] Acquired lock "refresh_cache-a7f01cd7-f148-48fc-a71a-5461672d6039" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 976.003953] env[69992]: DEBUG nova.network.neutron [req-aee745da-8fe5-4b54-9741-870047784890 req-81029850-825f-4b37-b28c-23a1935597bd service nova] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Refreshing network info cache for port b39fa912-b02a-4764-8cc8-f79e08d575c6 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 976.014515] env[69992]: DEBUG oslo_vmware.api [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523bc3ea-f136-3bcf-8dbd-4044114651a2, 'name': SearchDatastore_Task, 'duration_secs': 0.014648} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.017427] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-722f78b6-66ce-4dbb-90e1-432a2c617588 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.028417] env[69992]: DEBUG oslo_vmware.api [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 976.028417] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52261477-b7de-399e-834f-2c9577b4a810" [ 976.028417] env[69992]: _type = "Task" [ 976.028417] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.042436] env[69992]: DEBUG oslo_vmware.api [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52261477-b7de-399e-834f-2c9577b4a810, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.111797] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 62936d27-5405-4d29-b3ff-c4d8a74ba440 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 976.342616] env[69992]: DEBUG nova.compute.manager [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 976.465136] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521e3993-5887-cf39-01b5-4d5cbeb3ad0b, 'name': SearchDatastore_Task, 'duration_secs': 0.036489} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.466237] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23893fc7-d654-470c-8c2c-cd9259bdae1f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.473676] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 976.473676] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]525dd565-f15f-4599-7775-358a4e53dafc" [ 976.473676] env[69992]: _type = "Task" [ 976.473676] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.483275] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525dd565-f15f-4599-7775-358a4e53dafc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.541116] env[69992]: DEBUG oslo_vmware.api [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52261477-b7de-399e-834f-2c9577b4a810, 'name': SearchDatastore_Task, 'duration_secs': 0.016013} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.541588] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 976.541822] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] eec50935-f553-43c7-b67b-7289299745bd/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk. {{(pid=69992) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 976.542608] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e1bcbe84-3924-4165-94d6-8152a272079d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.552230] env[69992]: DEBUG oslo_vmware.api [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 976.552230] env[69992]: value = "task-2896954" [ 976.552230] env[69992]: _type = "Task" [ 976.552230] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.563647] env[69992]: DEBUG oslo_vmware.api [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896954, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.619288] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 2b1a0943-d59a-441d-a2e6-8149106803b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 976.834725] env[69992]: DEBUG nova.network.neutron [req-aee745da-8fe5-4b54-9741-870047784890 req-81029850-825f-4b37-b28c-23a1935597bd service nova] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Updated VIF entry in instance network info cache for port b39fa912-b02a-4764-8cc8-f79e08d575c6. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 976.835842] env[69992]: DEBUG nova.network.neutron [req-aee745da-8fe5-4b54-9741-870047784890 req-81029850-825f-4b37-b28c-23a1935597bd service nova] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Updating instance_info_cache with network_info: [{"id": "b39fa912-b02a-4764-8cc8-f79e08d575c6", "address": "fa:16:3e:02:93:e2", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb39fa912-b0", "ovs_interfaceid": "b39fa912-b02a-4764-8cc8-f79e08d575c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.871582] env[69992]: DEBUG oslo_concurrency.lockutils [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.988944] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525dd565-f15f-4599-7775-358a4e53dafc, 'name': SearchDatastore_Task, 'duration_secs': 0.019796} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.989372] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 976.989752] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 97cb6372-3f4e-427d-9509-7e6c43aa2e7b/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk. 
{{(pid=69992) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 976.990686] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-40f913c4-350b-4aff-ab01-e3174306f9bd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.008470] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 977.008470] env[69992]: value = "task-2896955" [ 977.008470] env[69992]: _type = "Task" [ 977.008470] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.022469] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896955, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.064909] env[69992]: DEBUG oslo_vmware.api [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896954, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.123146] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 714fafbf-a765-4e2c-8633-997d8244483c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 977.338746] env[69992]: DEBUG oslo_concurrency.lockutils [req-aee745da-8fe5-4b54-9741-870047784890 req-81029850-825f-4b37-b28c-23a1935597bd service nova] Releasing lock "refresh_cache-a7f01cd7-f148-48fc-a71a-5461672d6039" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 977.521529] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896955, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.564546] env[69992]: DEBUG oslo_vmware.api [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896954, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.717907} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.564856] env[69992]: INFO nova.virt.vmwareapi.ds_util [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] eec50935-f553-43c7-b67b-7289299745bd/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk. [ 977.565971] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c450ce-e084-4370-8c2b-0ff94ac836a7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.599425] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Reconfiguring VM instance instance-0000001e to attach disk [datastore2] eec50935-f553-43c7-b67b-7289299745bd/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 977.599925] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a604b30-51fb-4090-963b-ea42af915304 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.629732] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance b7a1b9e1-4d57-435f-bdb6-51481968aacb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 977.631729] env[69992]: DEBUG oslo_vmware.api [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 977.631729] env[69992]: value = "task-2896956" [ 977.631729] env[69992]: _type = "Task" [ 977.631729] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.645583] env[69992]: DEBUG oslo_vmware.api [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896956, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.022462] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896955, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.70777} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.022462] env[69992]: INFO nova.virt.vmwareapi.ds_util [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 97cb6372-3f4e-427d-9509-7e6c43aa2e7b/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk. [ 978.026191] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b98ff5d5-a084-41ed-94c0-21203f6f89d5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.052786] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] 97cb6372-3f4e-427d-9509-7e6c43aa2e7b/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 978.053291] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2f7850b-638c-49ab-87f3-3f8f27400d6e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.073179] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 978.073179] env[69992]: value = "task-2896957" [ 978.073179] env[69992]: _type = "Task" [ 978.073179] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.083139] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896957, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.133366] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance af07ebd0-5f12-49c3-a518-95be9a8d6c82 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 978.148692] env[69992]: DEBUG oslo_vmware.api [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896956, 'name': ReconfigVM_Task, 'duration_secs': 0.344314} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.149058] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Reconfigured VM instance instance-0000001e to attach disk [datastore2] eec50935-f553-43c7-b67b-7289299745bd/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 978.150037] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d577d7-8fbe-4000-92cf-52879f0b26b3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.180121] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2cfa61b9-f59f-4afc-abda-2b4e4ca1f468 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.198605] env[69992]: DEBUG oslo_vmware.api [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 978.198605] env[69992]: value = "task-2896958" [ 978.198605] env[69992]: _type = "Task" [ 978.198605] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.208843] env[69992]: DEBUG oslo_vmware.api [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896958, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.585286] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896957, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.643954] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance b3d62400-e639-4c49-9207-64fd1e684f99 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 978.714516] env[69992]: DEBUG oslo_vmware.api [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896958, 'name': ReconfigVM_Task, 'duration_secs': 0.19239} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.714571] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 978.714895] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-998fa215-a580-4508-aad0-36f7ba9da3e2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.726627] env[69992]: DEBUG oslo_vmware.api [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 978.726627] env[69992]: value = "task-2896959" [ 978.726627] env[69992]: _type = "Task" [ 978.726627] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.736231] env[69992]: DEBUG oslo_vmware.api [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896959, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.085925] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896957, 'name': ReconfigVM_Task, 'duration_secs': 0.746631} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.086274] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Reconfigured VM instance instance-00000023 to attach disk [datastore1] 97cb6372-3f4e-427d-9509-7e6c43aa2e7b/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 979.087140] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad8f0976-a763-46e9-a7cf-d33b6f3b704e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.116603] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7098d05b-325b-4d74-b008-5f9b0f48fc26 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.134793] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 979.134793] env[69992]: value = "task-2896960" [ 979.134793] env[69992]: _type = "Task" [ 979.134793] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.145487] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896960, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.147384] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 7fc7c481-75e8-40f2-a971-752ce6dde59b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 979.244472] env[69992]: DEBUG oslo_vmware.api [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896959, 'name': PowerOnVM_Task, 'duration_secs': 0.480569} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.244830] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 979.247644] env[69992]: DEBUG nova.compute.manager [None req-c0cd4889-7490-4994-8b71-4cdf7ed09354 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 979.248503] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a43d358-a445-48b9-a0f1-c1e092163f6c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.645375] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896960, 'name': ReconfigVM_Task, 'duration_secs': 0.345549} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.645863] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 979.646173] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44c360ed-0fec-4ce1-8335-6e7e863360e0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.649762] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance a06d4b38-0e39-46ef-a588-7627661cb201 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 979.654814] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 979.654814] env[69992]: value = "task-2896961" [ 979.654814] env[69992]: _type = "Task" [ 979.654814] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.663648] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896961, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.154819] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 0e8163d9-6ff5-4f1e-af33-ccb42fa46750 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.170482] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896961, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.333972] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "fcbe1142-72dc-4a02-af9b-e03a2031a247" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.334250] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "fcbe1142-72dc-4a02-af9b-e03a2031a247" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.657953] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 06442c68-7dc6-46a1-9e35-34a62730a555 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.668592] env[69992]: DEBUG oslo_vmware.api [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896961, 'name': PowerOnVM_Task, 'duration_secs': 0.613607} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.668859] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 980.675247] env[69992]: DEBUG nova.compute.manager [None req-b28d51e4-80f0-487c-898f-94704c02b8e8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 980.676419] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db7a71e6-3e1d-4329-b2db-6a1c8d0fbb6c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.845019] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bb27e860-6f74-4f08-9ad5-a430d5b41bb2 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Acquiring lock "eba81db1-973c-4981-baca-cb98e4087510" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.845270] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bb27e860-6f74-4f08-9ad5-a430d5b41bb2 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Lock "eba81db1-973c-4981-baca-cb98e4087510" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.927625] env[69992]: INFO nova.compute.manager [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Unrescuing [ 980.927625] env[69992]: DEBUG oslo_concurrency.lockutils [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "refresh_cache-eec50935-f553-43c7-b67b-7289299745bd" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.927625] env[69992]: DEBUG oslo_concurrency.lockutils [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquired lock "refresh_cache-eec50935-f553-43c7-b67b-7289299745bd" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.927793] env[69992]: DEBUG nova.network.neutron [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 981.163287] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 
673be00f-e3c5-4a54-beeb-cf89828e9e32 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 981.163659] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Total usable vcpus: 48, total allocated vcpus: 20 {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 981.163824] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4416MB phys_disk=200GB used_disk=20GB total_vcpus=48 used_vcpus=20 pci_stats=[] {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 981.348296] env[69992]: DEBUG nova.compute.utils [None req-bb27e860-6f74-4f08-9ad5-a430d5b41bb2 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 981.653222] env[69992]: DEBUG nova.network.neutron [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Updating instance_info_cache with network_info: [{"id": "ae0113e0-6fd4-44a9-b496-7e09ffb4539b", "address": "fa:16:3e:4e:44:12", "network": {"id": "e710c9b1-06e9-45f1-88fe-251001c4f54f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1116812669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8217315011854468b0cc17c4dfe342f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae0113e0-6f", "ovs_interfaceid": "ae0113e0-6fd4-44a9-b496-7e09ffb4539b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.742856] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-803063bc-4f51-43d9-9db5-27fa6effbcbb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.751886] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd4d94c3-c57f-43a2-9596-cd20f91c6490 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.786943] env[69992]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c417d311-6620-4d5a-bb8c-3bbb884b9b20 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.796662] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed1f6055-1b8c-416d-857d-100bcc872715 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.812267] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 981.851481] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bb27e860-6f74-4f08-9ad5-a430d5b41bb2 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Lock "eba81db1-973c-4981-baca-cb98e4087510" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.156451] env[69992]: DEBUG oslo_concurrency.lockutils [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Releasing lock "refresh_cache-eec50935-f553-43c7-b67b-7289299745bd" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.157302] env[69992]: DEBUG nova.objects.instance [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lazy-loading 'flavor' on Instance uuid eec50935-f553-43c7-b67b-7289299745bd {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 982.315646] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 982.664884] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1539a88-2713-46cd-a23e-013d7aecc2b8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.688393] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 982.688769] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3e472346-9eb1-45bc-9f34-8c29257d0cd1 {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.697943] env[69992]: DEBUG oslo_vmware.api [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 982.697943] env[69992]: value = "task-2896962" [ 982.697943] env[69992]: _type = "Task" [ 982.697943] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.708524] env[69992]: DEBUG oslo_vmware.api [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896962, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.784884] env[69992]: INFO nova.compute.manager [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Unrescuing [ 982.785203] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquiring lock "refresh_cache-97cb6372-3f4e-427d-9509-7e6c43aa2e7b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.785357] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquired lock "refresh_cache-97cb6372-3f4e-427d-9509-7e6c43aa2e7b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.785654] env[69992]: DEBUG nova.network.neutron [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 982.820528] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69992) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 982.820807] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 10.791s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.821102] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.108s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.822667] env[69992]: INFO nova.compute.claims [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 
tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 982.938954] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bb27e860-6f74-4f08-9ad5-a430d5b41bb2 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Acquiring lock "eba81db1-973c-4981-baca-cb98e4087510" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.938954] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bb27e860-6f74-4f08-9ad5-a430d5b41bb2 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Lock "eba81db1-973c-4981-baca-cb98e4087510" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.938954] env[69992]: INFO nova.compute.manager [None req-bb27e860-6f74-4f08-9ad5-a430d5b41bb2 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] [instance: eba81db1-973c-4981-baca-cb98e4087510] Attaching volume 794429f7-4ac5-40f0-a80a-f64e8e163e5d to /dev/sdb [ 982.984705] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b40007-d882-4be1-8c48-22e580e26ebb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.993414] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e26f32-8e52-42cb-a3f6-28e39ba4d125 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.012272] env[69992]: DEBUG nova.virt.block_device [None req-bb27e860-6f74-4f08-9ad5-a430d5b41bb2 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] [instance: eba81db1-973c-4981-baca-cb98e4087510] Updating existing volume attachment record: 2d8698b6-1445-4886-8fc7-9262456c1ca2 {{(pid=69992) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 983.208701] env[69992]: DEBUG oslo_vmware.api [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896962, 'name': PowerOffVM_Task, 'duration_secs': 0.278821} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.208701] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 983.213832] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Reconfiguring VM instance instance-0000001e to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 983.214099] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06ceefae-8036-4043-9274-10bccc8e42a6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.234247] env[69992]: DEBUG oslo_vmware.api [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 983.234247] env[69992]: value = "task-2896966" [ 983.234247] env[69992]: _type = "Task" [ 983.234247] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.243182] env[69992]: DEBUG oslo_vmware.api [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896966, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.494183] env[69992]: DEBUG nova.network.neutron [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Updating instance_info_cache with network_info: [{"id": "9f2a98a5-6cca-48ee-84a6-66bf08b7e92f", "address": "fa:16:3e:c1:93:aa", "network": {"id": "6427034b-cb68-41ff-8426-d7ce876af837", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-938441390-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "3fc8205ec2e14fdba28998521b552a69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f2a98a5-6c", "ovs_interfaceid": "9f2a98a5-6cca-48ee-84a6-66bf08b7e92f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.745950] env[69992]: DEBUG oslo_vmware.api [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896966, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.940274] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520a1200-e57c-71f1-5a07-9fccab55ef12/disk-0.vmdk. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 983.941309] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2383554d-9311-4786-81a8-da5d434933ca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.951856] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520a1200-e57c-71f1-5a07-9fccab55ef12/disk-0.vmdk is in state: ready. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 983.952084] env[69992]: ERROR oslo_vmware.rw_handles [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520a1200-e57c-71f1-5a07-9fccab55ef12/disk-0.vmdk due to incomplete transfer. 
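The SearchDatastore_Task, CopyVirtualDisk_Task, ReconfigVM_Task and PowerOnVM_Task entries above all follow the same oslo.vmware pattern: a *_Task method is invoked through the API session, and wait_for_task() (the wait_for_task/_poll_task frames in these log lines) polls the returned task reference until vCenter reports success or failure, which is what produces the repeated "progress is N%" and "completed successfully" entries. The sketch below shows that pattern in isolation, assuming a reachable vCenter; the host, credentials, datastore paths and the single-datacenter lookup are hypothetical placeholders rather than values taken from this deployment, and Nova's own wrappers (ds_util.disk_copy, vm_util.power_on_instance) add more logic around it.

# Sketch of the oslo.vmware task-polling pattern seen in this log.
# All names below (host, credentials, datastore paths) are placeholders.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession('vcenter.example.test', 'user', 'password',
                               api_retry_count=10,
                               task_poll_interval=0.5)

# Look up a Datacenter managed object reference (first page, first result).
retrieve_result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                     'Datacenter', 1)
dc_ref = retrieve_result.objects[0].obj

# Invoke VirtualDiskManager.CopyVirtualDisk_Task, as in the entries above.
disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore2] devstack-image-cache_base/img/img.vmdk',
    sourceDatacenter=dc_ref,
    destName='[datastore2] some-instance/img-rescue.vmdk',
    destDatacenter=dc_ref)

# Polls the task at task_poll_interval (the "progress is N%" lines) and
# returns once vCenter marks it complete, raising if the task failed.
session.wait_for_task(task)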
[ 983.952344] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2059c45e-338d-4637-a5f8-734a4b4ba075 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.964166] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520a1200-e57c-71f1-5a07-9fccab55ef12/disk-0.vmdk. {{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 983.964371] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Uploaded image 39e743b4-f55e-4237-8936-ced158cc751a to the Glance image server {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 983.967102] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Destroying the VM {{(pid=69992) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 983.967573] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-02762707-a10e-4de4-b0d3-a67d380b9eee {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.979809] env[69992]: DEBUG oslo_vmware.api [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 983.979809] env[69992]: value = "task-2896967" [ 983.979809] env[69992]: _type = "Task" [ 983.979809] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.992678] env[69992]: DEBUG oslo_vmware.api [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896967, 'name': Destroy_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.996633] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Releasing lock "refresh_cache-97cb6372-3f4e-427d-9509-7e6c43aa2e7b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 983.997350] env[69992]: DEBUG nova.objects.instance [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lazy-loading 'flavor' on Instance uuid 97cb6372-3f4e-427d-9509-7e6c43aa2e7b {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 984.250054] env[69992]: DEBUG oslo_vmware.api [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896966, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.383920] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d98a3d-571c-4652-8d23-6e301ba7d4dc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.397355] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3820b131-a91c-486d-9423-665d560959f1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.431624] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c472ac-b48d-4d7f-8799-92b5be96701a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.439876] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a76b2dc-06ca-4475-abdf-6a9207db7420 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.454442] env[69992]: DEBUG nova.compute.provider_tree [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 984.489949] env[69992]: DEBUG oslo_vmware.api [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896967, 'name': Destroy_Task} progress is 33%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.505199] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7917a21f-60ee-4cb7-8634-86af0d94b192 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.531061] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 984.531403] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-747a7aae-2654-47d3-9656-07627fbee074 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.538914] env[69992]: DEBUG oslo_vmware.api [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 984.538914] env[69992]: value = "task-2896968" [ 984.538914] env[69992]: _type = "Task" [ 984.538914] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.547766] env[69992]: DEBUG oslo_vmware.api [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896968, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.746831] env[69992]: DEBUG oslo_vmware.api [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896966, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.958164] env[69992]: DEBUG nova.scheduler.client.report [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 984.991763] env[69992]: DEBUG oslo_vmware.api [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896967, 'name': Destroy_Task, 'duration_secs': 0.730644} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.992066] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Destroyed the VM [ 984.992309] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Deleting Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 984.992560] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f166d8bd-a1cb-46a9-8e3d-4b51f09a82ac {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.001444] env[69992]: DEBUG oslo_vmware.api [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 985.001444] env[69992]: value = "task-2896969" [ 985.001444] env[69992]: _type = "Task" [ 985.001444] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.011271] env[69992]: DEBUG oslo_vmware.api [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896969, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.050376] env[69992]: DEBUG oslo_vmware.api [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896968, 'name': PowerOffVM_Task, 'duration_secs': 0.374332} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.050718] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 985.056594] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Reconfiguring VM instance instance-00000023 to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 985.056903] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03fbacc4-d37e-441d-a771-3ff017e7b63c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.079216] env[69992]: DEBUG oslo_vmware.api [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 985.079216] env[69992]: value = "task-2896970" [ 985.079216] env[69992]: _type = "Task" [ 985.079216] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.091915] env[69992]: DEBUG oslo_vmware.api [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896970, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.246890] env[69992]: DEBUG oslo_vmware.api [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896966, 'name': ReconfigVM_Task, 'duration_secs': 1.82384} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.247100] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Reconfigured VM instance instance-0000001e to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 985.247326] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 985.247614] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1eea0526-1597-48eb-aba5-65bb7fea5a96 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.255668] env[69992]: DEBUG oslo_vmware.api [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 985.255668] env[69992]: value = "task-2896971" [ 985.255668] env[69992]: _type = "Task" [ 985.255668] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.264508] env[69992]: DEBUG oslo_vmware.api [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896971, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.464822] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.644s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.465387] env[69992]: DEBUG nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 985.467993] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.422s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.469418] env[69992]: INFO nova.compute.claims [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 985.514448] env[69992]: DEBUG oslo_vmware.api [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896969, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.589264] env[69992]: DEBUG oslo_vmware.api [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896970, 'name': ReconfigVM_Task, 'duration_secs': 0.306955} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.589562] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Reconfigured VM instance instance-00000023 to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 985.589757] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 985.590071] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2ccfd51-5a60-4f8c-b390-e63f9acb5ef8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.598398] env[69992]: DEBUG oslo_vmware.api [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 985.598398] env[69992]: value = "task-2896973" [ 985.598398] env[69992]: _type = "Task" [ 985.598398] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.619368] env[69992]: DEBUG oslo_vmware.api [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896973, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.768017] env[69992]: DEBUG oslo_vmware.api [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2896971, 'name': PowerOnVM_Task, 'duration_secs': 0.423001} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.768017] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 985.768017] env[69992]: DEBUG nova.compute.manager [None req-09b855f8-6f76-4e45-bae0-ac60c0a3aca3 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 985.768017] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e0d940-ed2c-4a28-8713-4b399b4486b1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.975817] env[69992]: DEBUG nova.compute.utils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 985.980678] env[69992]: DEBUG nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 985.980908] env[69992]: DEBUG nova.network.neutron [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 986.012163] env[69992]: DEBUG oslo_vmware.api [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896969, 'name': RemoveSnapshot_Task, 'duration_secs': 0.539893} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.012427] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Deleted Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 986.012771] env[69992]: INFO nova.compute.manager [None req-1f9060c4-77fd-4f63-bd1b-f330f37f86f0 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Took 15.36 seconds to snapshot the instance on the hypervisor. [ 986.025838] env[69992]: DEBUG nova.policy [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0dbc40b90a6e4dad8d4977afc0a5c18d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e2c061f9a6740919869fc68b54d074e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 986.112301] env[69992]: DEBUG oslo_vmware.api [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896973, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.321615] env[69992]: DEBUG nova.network.neutron [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Successfully created port: c3a06e04-2fae-4c1e-bece-fd85d6e74f50 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 986.481703] env[69992]: DEBUG nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 986.615059] env[69992]: DEBUG oslo_vmware.api [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896973, 'name': PowerOnVM_Task, 'duration_secs': 0.564862} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.618663] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 986.619253] env[69992]: DEBUG nova.compute.manager [None req-81f61a52-266d-410b-a743-b174488280f8 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 986.620943] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339e04db-84f5-4faa-a49d-ce9d1b5f84e0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.008811] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b762fc-1f4c-410b-b9cc-11a26b3d894d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.018323] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80456c9c-d02a-498a-836b-13cdb6e0f56d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.051319] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e1eabb-67f4-48e6-b4aa-dd7882929b03 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.060801] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5122b908-c9a7-4cb7-a67d-0c4f1f963ae1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.074937] env[69992]: DEBUG nova.compute.provider_tree [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 987.497360] env[69992]: DEBUG nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 987.520430] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 987.520695] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 987.520857] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 987.521059] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 987.521198] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 987.521345] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 987.521559] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 987.521714] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 987.521880] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 987.522055] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 987.522231] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 987.523115] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838f71f1-6e99-4fbd-97bc-bb723214c052 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.533658] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-695522c4-05e1-481f-87ab-1290b514b754 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.569603] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb27e860-6f74-4f08-9ad5-a430d5b41bb2 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] [instance: eba81db1-973c-4981-baca-cb98e4087510] Volume attach. 
Driver type: vmdk {{(pid=69992) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 987.569847] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb27e860-6f74-4f08-9ad5-a430d5b41bb2 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] [instance: eba81db1-973c-4981-baca-cb98e4087510] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581935', 'volume_id': '794429f7-4ac5-40f0-a80a-f64e8e163e5d', 'name': 'volume-794429f7-4ac5-40f0-a80a-f64e8e163e5d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'eba81db1-973c-4981-baca-cb98e4087510', 'attached_at': '', 'detached_at': '', 'volume_id': '794429f7-4ac5-40f0-a80a-f64e8e163e5d', 'serial': '794429f7-4ac5-40f0-a80a-f64e8e163e5d'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 987.570834] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7e15c9c-e5ef-4801-aa7a-3f68bfe50b73 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.591824] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65fb2e4d-8e0f-47c9-9a40-319bdb0a7356 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.617918] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb27e860-6f74-4f08-9ad5-a430d5b41bb2 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] [instance: eba81db1-973c-4981-baca-cb98e4087510] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] volume-794429f7-4ac5-40f0-a80a-f64e8e163e5d/volume-794429f7-4ac5-40f0-a80a-f64e8e163e5d.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 987.619068] env[69992]: ERROR nova.scheduler.client.report [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [req-2a893366-1cad-440e-8609-ecacd5932680] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2a893366-1cad-440e-8609-ecacd5932680"}]} [ 987.620165] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66837978-125b-44dd-9160-70a7007a9710 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.644670] env[69992]: DEBUG oslo_vmware.api [None req-bb27e860-6f74-4f08-9ad5-a430d5b41bb2 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Waiting for the task: (returnval){ [ 987.644670] env[69992]: value = "task-2896974" [ 987.644670] env[69992]: _type = "Task" [ 987.644670] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.654504] env[69992]: DEBUG oslo_vmware.api [None req-bb27e860-6f74-4f08-9ad5-a430d5b41bb2 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Task: {'id': task-2896974, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.655531] env[69992]: DEBUG nova.scheduler.client.report [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Refreshing inventories for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 987.673321] env[69992]: DEBUG nova.scheduler.client.report [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updating ProviderTree inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 987.673552] env[69992]: DEBUG nova.compute.provider_tree [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 987.689204] env[69992]: DEBUG nova.scheduler.client.report [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Refreshing aggregate 
associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, aggregates: None {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 987.710062] env[69992]: DEBUG nova.scheduler.client.report [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Refreshing trait associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 987.846205] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquiring lock "97cb6372-3f4e-427d-9509-7e6c43aa2e7b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.846388] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lock "97cb6372-3f4e-427d-9509-7e6c43aa2e7b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.846593] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquiring lock "97cb6372-3f4e-427d-9509-7e6c43aa2e7b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.846769] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lock "97cb6372-3f4e-427d-9509-7e6c43aa2e7b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.846962] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lock "97cb6372-3f4e-427d-9509-7e6c43aa2e7b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.850103] env[69992]: INFO nova.compute.manager [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Terminating instance [ 987.963468] env[69992]: DEBUG nova.compute.manager [req-228b6281-8a5f-459b-b744-c64bef81237d req-dbf6b219-a77b-4e6c-be89-6ad745a4ff4d service nova] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Received event network-vif-plugged-c3a06e04-2fae-4c1e-bece-fd85d6e74f50 {{(pid=69992) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11737}} [ 987.963848] env[69992]: DEBUG oslo_concurrency.lockutils [req-228b6281-8a5f-459b-b744-c64bef81237d req-dbf6b219-a77b-4e6c-be89-6ad745a4ff4d service nova] Acquiring lock "d361769c-bfc2-4c72-83f4-dc9b51f907a3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.964098] env[69992]: DEBUG oslo_concurrency.lockutils [req-228b6281-8a5f-459b-b744-c64bef81237d req-dbf6b219-a77b-4e6c-be89-6ad745a4ff4d service nova] Lock "d361769c-bfc2-4c72-83f4-dc9b51f907a3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.964271] env[69992]: DEBUG oslo_concurrency.lockutils [req-228b6281-8a5f-459b-b744-c64bef81237d req-dbf6b219-a77b-4e6c-be89-6ad745a4ff4d service nova] Lock "d361769c-bfc2-4c72-83f4-dc9b51f907a3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.964452] env[69992]: DEBUG nova.compute.manager [req-228b6281-8a5f-459b-b744-c64bef81237d req-dbf6b219-a77b-4e6c-be89-6ad745a4ff4d service nova] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] No waiting events found dispatching network-vif-plugged-c3a06e04-2fae-4c1e-bece-fd85d6e74f50 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 987.964877] env[69992]: WARNING nova.compute.manager [req-228b6281-8a5f-459b-b744-c64bef81237d req-dbf6b219-a77b-4e6c-be89-6ad745a4ff4d service nova] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Received unexpected event network-vif-plugged-c3a06e04-2fae-4c1e-bece-fd85d6e74f50 for instance with vm_state building and task_state spawning. [ 987.998539] env[69992]: DEBUG nova.network.neutron [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Successfully updated port: c3a06e04-2fae-4c1e-bece-fd85d6e74f50 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 988.162373] env[69992]: DEBUG oslo_vmware.api [None req-bb27e860-6f74-4f08-9ad5-a430d5b41bb2 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Task: {'id': task-2896974, 'name': ReconfigVM_Task, 'duration_secs': 0.378804} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.162703] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb27e860-6f74-4f08-9ad5-a430d5b41bb2 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] [instance: eba81db1-973c-4981-baca-cb98e4087510] Reconfigured VM instance instance-0000000d to attach disk [datastore2] volume-794429f7-4ac5-40f0-a80a-f64e8e163e5d/volume-794429f7-4ac5-40f0-a80a-f64e8e163e5d.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 988.171014] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32e6f983-71ae-46e9-b3e9-070eab4b286a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.192959] env[69992]: DEBUG oslo_vmware.api [None req-bb27e860-6f74-4f08-9ad5-a430d5b41bb2 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Waiting for the task: (returnval){ [ 988.192959] env[69992]: value = "task-2896975" [ 988.192959] env[69992]: _type = "Task" [ 988.192959] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.207356] env[69992]: DEBUG oslo_vmware.api [None req-bb27e860-6f74-4f08-9ad5-a430d5b41bb2 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Task: {'id': task-2896975, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.333992] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d2e485-dbe6-4af1-bc73-c1004a71519b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.342834] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b8f9d6-d825-4159-8279-1cfcf50f0dba {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.378228] env[69992]: DEBUG nova.compute.manager [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 988.378563] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 988.379719] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dbf7118-683f-4fbf-b2cd-cc73aa8fef32 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.383456] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94596604-3789-46ee-8678-e53f682acd97 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.398296] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 988.398863] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-94bdcbf6-607b-4055-bcd3-12928f06d408 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.404286] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c0933f5-9807-41c3-962b-ffdc9e6c8247 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.422083] env[69992]: DEBUG nova.compute.provider_tree [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 988.425062] env[69992]: DEBUG oslo_vmware.api [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 988.425062] env[69992]: value = "task-2896976" [ 988.425062] env[69992]: _type = "Task" [ 988.425062] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.435528] env[69992]: DEBUG oslo_vmware.api [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896976, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.506229] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "refresh_cache-d361769c-bfc2-4c72-83f4-dc9b51f907a3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.506229] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquired lock "refresh_cache-d361769c-bfc2-4c72-83f4-dc9b51f907a3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 988.506229] env[69992]: DEBUG nova.network.neutron [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 988.707837] env[69992]: DEBUG oslo_vmware.api [None req-bb27e860-6f74-4f08-9ad5-a430d5b41bb2 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Task: {'id': task-2896975, 'name': ReconfigVM_Task, 'duration_secs': 0.160164} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.708155] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb27e860-6f74-4f08-9ad5-a430d5b41bb2 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] [instance: eba81db1-973c-4981-baca-cb98e4087510] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581935', 'volume_id': '794429f7-4ac5-40f0-a80a-f64e8e163e5d', 'name': 'volume-794429f7-4ac5-40f0-a80a-f64e8e163e5d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'eba81db1-973c-4981-baca-cb98e4087510', 'attached_at': '', 'detached_at': '', 'volume_id': '794429f7-4ac5-40f0-a80a-f64e8e163e5d', 'serial': '794429f7-4ac5-40f0-a80a-f64e8e163e5d'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 988.899595] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquiring lock "a29534bf-ee12-4b94-839b-4a12659ebd3b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.899873] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lock "a29534bf-ee12-4b94-839b-4a12659ebd3b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.900105] env[69992]: DEBUG oslo_concurrency.lockutils [None 
req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquiring lock "a29534bf-ee12-4b94-839b-4a12659ebd3b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.900290] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lock "a29534bf-ee12-4b94-839b-4a12659ebd3b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.900635] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lock "a29534bf-ee12-4b94-839b-4a12659ebd3b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.904673] env[69992]: INFO nova.compute.manager [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Terminating instance [ 988.939628] env[69992]: DEBUG oslo_vmware.api [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896976, 'name': PowerOffVM_Task, 'duration_secs': 0.222071} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.939893] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 988.940074] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 988.940371] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef18eed6-faa7-444d-8ddd-4a2447b897ea {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.968155] env[69992]: DEBUG nova.scheduler.client.report [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 64 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 988.968155] env[69992]: DEBUG nova.compute.provider_tree [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 64 to 65 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 988.968155] env[69992]: DEBUG nova.compute.provider_tree [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 989.028112] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 989.028406] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 
tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 989.028657] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Deleting the datastore file [datastore1] 97cb6372-3f4e-427d-9509-7e6c43aa2e7b {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 989.028973] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2bcb8059-b2bc-4fc0-9ff8-ba08fba80d2e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.038323] env[69992]: DEBUG oslo_vmware.api [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 989.038323] env[69992]: value = "task-2896978" [ 989.038323] env[69992]: _type = "Task" [ 989.038323] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.048257] env[69992]: DEBUG oslo_vmware.api [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896978, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.057266] env[69992]: DEBUG nova.network.neutron [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 989.196252] env[69992]: DEBUG nova.network.neutron [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Updating instance_info_cache with network_info: [{"id": "c3a06e04-2fae-4c1e-bece-fd85d6e74f50", "address": "fa:16:3e:b5:c3:14", "network": {"id": "d897c26f-d462-458f-b9c9-0e2535d7e3f7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-47161419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e2c061f9a6740919869fc68b54d074e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4b6ddb2-2e19-4031-9b22-add90d41a114", "external-id": "nsx-vlan-transportzone-921", "segmentation_id": 921, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3a06e04-2f", "ovs_interfaceid": "c3a06e04-2fae-4c1e-bece-fd85d6e74f50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.299594] env[69992]: DEBUG oslo_concurrency.lockutils [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquiring lock "a9274dfc-afbd-419b-a98b-053d71a05d7c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.299894] env[69992]: DEBUG oslo_concurrency.lockutils [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Lock "a9274dfc-afbd-419b-a98b-053d71a05d7c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 989.300138] env[69992]: DEBUG oslo_concurrency.lockutils [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquiring lock "a9274dfc-afbd-419b-a98b-053d71a05d7c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.300351] env[69992]: DEBUG oslo_concurrency.lockutils [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Lock "a9274dfc-afbd-419b-a98b-053d71a05d7c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 989.300538] env[69992]: DEBUG oslo_concurrency.lockutils [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Lock "a9274dfc-afbd-419b-a98b-053d71a05d7c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 989.302600] env[69992]: INFO nova.compute.manager [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Terminating instance [ 989.410826] env[69992]: DEBUG nova.compute.manager [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 989.411091] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 989.412099] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cef35ba-f897-46ff-bb9f-509e4b272e73 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.422188] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 989.422188] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d7f97afd-a442-40e4-b8b2-c1d520f91880 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.428872] env[69992]: DEBUG oslo_vmware.api [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 989.428872] env[69992]: value = "task-2896979" [ 989.428872] env[69992]: _type = "Task" [ 989.428872] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.440569] env[69992]: DEBUG oslo_vmware.api [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896979, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.475324] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.006s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 989.475324] env[69992]: DEBUG nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 989.478287] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.703s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 989.480155] env[69992]: INFO nova.compute.claims [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 989.548954] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "1b4da2ab-d026-45d8-8234-79ddd84d5cbb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.549226] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "1b4da2ab-d026-45d8-8234-79ddd84d5cbb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 989.554965] env[69992]: DEBUG oslo_vmware.api [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2896978, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14419} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.555228] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 989.555412] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 989.555588] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 989.555759] env[69992]: INFO nova.compute.manager [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Took 1.18 seconds to destroy the instance on the hypervisor. [ 989.555994] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 989.556427] env[69992]: DEBUG nova.compute.manager [-] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 989.556524] env[69992]: DEBUG nova.network.neutron [-] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 989.701216] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Releasing lock "refresh_cache-d361769c-bfc2-4c72-83f4-dc9b51f907a3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 989.701586] env[69992]: DEBUG nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Instance network_info: |[{"id": "c3a06e04-2fae-4c1e-bece-fd85d6e74f50", "address": "fa:16:3e:b5:c3:14", "network": {"id": "d897c26f-d462-458f-b9c9-0e2535d7e3f7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-47161419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": 
{"injected": false, "tenant_id": "6e2c061f9a6740919869fc68b54d074e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4b6ddb2-2e19-4031-9b22-add90d41a114", "external-id": "nsx-vlan-transportzone-921", "segmentation_id": 921, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3a06e04-2f", "ovs_interfaceid": "c3a06e04-2fae-4c1e-bece-fd85d6e74f50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 989.702034] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:c3:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a4b6ddb2-2e19-4031-9b22-add90d41a114', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c3a06e04-2fae-4c1e-bece-fd85d6e74f50', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 989.709950] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Creating folder: Project (6e2c061f9a6740919869fc68b54d074e). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 989.710281] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2423eeb6-7f1b-4f4a-8a18-3c6182f9e9fc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.724030] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Created folder: Project (6e2c061f9a6740919869fc68b54d074e) in parent group-v581821. [ 989.724030] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Creating folder: Instances. Parent ref: group-v581936. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 989.724030] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b1ee9e4-8f73-4e71-8c2a-c61c8f4ad2df {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.735804] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Created folder: Instances in parent group-v581936. [ 989.736485] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 989.736807] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 989.737325] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6179b97b-b552-4fcb-95b0-95eaa9ddd8b3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.760288] env[69992]: DEBUG nova.objects.instance [None req-bb27e860-6f74-4f08-9ad5-a430d5b41bb2 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Lazy-loading 'flavor' on Instance uuid eba81db1-973c-4981-baca-cb98e4087510 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 989.763041] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 989.763041] env[69992]: value = "task-2896982" [ 989.763041] env[69992]: _type = "Task" [ 989.763041] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.778961] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896982, 'name': CreateVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.806451] env[69992]: DEBUG nova.compute.manager [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 989.806636] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 989.808048] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-953a2cf1-b5d1-4512-9024-1f58c15bb95e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.820031] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 989.820031] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f3520cc1-9a22-461b-87d8-d973f1268c1b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.829367] env[69992]: DEBUG oslo_vmware.api [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for the task: (returnval){ [ 989.829367] env[69992]: value = "task-2896983" [ 989.829367] env[69992]: _type = "Task" [ 989.829367] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.845030] env[69992]: DEBUG oslo_vmware.api [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2896983, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.915174] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquiring lock "4cd9fb91-44f1-4304-a2bf-c8b294b19e0e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.915416] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Lock "4cd9fb91-44f1-4304-a2bf-c8b294b19e0e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 989.940735] env[69992]: DEBUG oslo_vmware.api [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896979, 'name': PowerOffVM_Task, 'duration_secs': 0.217722} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.940735] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 989.940909] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 989.943014] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51ced2dd-001a-4bf0-901d-5899e8b727f3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.985366] env[69992]: DEBUG nova.compute.utils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 989.990056] env[69992]: DEBUG nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 989.990290] env[69992]: DEBUG nova.network.neutron [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 990.036562] env[69992]: DEBUG nova.compute.manager [req-13e64182-6672-4673-934e-d6b7d21dd5e1 req-2e6f1d0f-f4e8-4659-aa3a-ce2ce4fce6d6 service nova] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Received event network-changed-c3a06e04-2fae-4c1e-bece-fd85d6e74f50 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 990.036872] env[69992]: DEBUG nova.compute.manager [req-13e64182-6672-4673-934e-d6b7d21dd5e1 req-2e6f1d0f-f4e8-4659-aa3a-ce2ce4fce6d6 service nova] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Refreshing instance network info cache due to event network-changed-c3a06e04-2fae-4c1e-bece-fd85d6e74f50. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 990.037223] env[69992]: DEBUG oslo_concurrency.lockutils [req-13e64182-6672-4673-934e-d6b7d21dd5e1 req-2e6f1d0f-f4e8-4659-aa3a-ce2ce4fce6d6 service nova] Acquiring lock "refresh_cache-d361769c-bfc2-4c72-83f4-dc9b51f907a3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.037409] env[69992]: DEBUG oslo_concurrency.lockutils [req-13e64182-6672-4673-934e-d6b7d21dd5e1 req-2e6f1d0f-f4e8-4659-aa3a-ce2ce4fce6d6 service nova] Acquired lock "refresh_cache-d361769c-bfc2-4c72-83f4-dc9b51f907a3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.037593] env[69992]: DEBUG nova.network.neutron [req-13e64182-6672-4673-934e-d6b7d21dd5e1 req-2e6f1d0f-f4e8-4659-aa3a-ce2ce4fce6d6 service nova] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Refreshing network info cache for port c3a06e04-2fae-4c1e-bece-fd85d6e74f50 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 990.040629] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 990.040924] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 990.041131] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Deleting the datastore file [datastore2] a29534bf-ee12-4b94-839b-4a12659ebd3b {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 990.042491] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-21f3464b-de7f-4b52-b2f0-15561799d26e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.053395] env[69992]: DEBUG nova.policy [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0dbc40b90a6e4dad8d4977afc0a5c18d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e2c061f9a6740919869fc68b54d074e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 990.057178] env[69992]: DEBUG oslo_vmware.api [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 990.057178] env[69992]: value = "task-2896985" [ 990.057178] env[69992]: 
_type = "Task" [ 990.057178] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.070930] env[69992]: DEBUG oslo_vmware.api [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896985, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.268761] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bb27e860-6f74-4f08-9ad5-a430d5b41bb2 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Lock "eba81db1-973c-4981-baca-cb98e4087510" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.330s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.275898] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896982, 'name': CreateVM_Task, 'duration_secs': 0.414504} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.277333] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 990.279795] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.279795] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.279795] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 990.279795] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-051651ef-9bc7-40a4-879d-8be40ccf8ae1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.285207] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 990.285207] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5260807d-4342-7d92-61ef-477b8359cf2a" [ 990.285207] env[69992]: _type = "Task" [ 990.285207] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.297583] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5260807d-4342-7d92-61ef-477b8359cf2a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.324744] env[69992]: DEBUG nova.network.neutron [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Successfully created port: bd5193f4-aa70-4668-af0b-696f84cf0080 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 990.326745] env[69992]: DEBUG nova.network.neutron [-] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.341064] env[69992]: DEBUG oslo_vmware.api [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2896983, 'name': PowerOffVM_Task, 'duration_secs': 0.186294} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.341399] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 990.341794] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 990.341911] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1d066391-a51e-41b3-a7ef-7331893590fe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.414900] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 990.415057] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 990.415242] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-688438ad-e731-48f8-88f8-59eed5eb5260 
tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Deleting the datastore file [datastore2] a9274dfc-afbd-419b-a98b-053d71a05d7c {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 990.415504] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf219f3e-91ee-4f7d-bb62-1e66a66e8b98 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.423440] env[69992]: DEBUG oslo_vmware.api [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for the task: (returnval){ [ 990.423440] env[69992]: value = "task-2896987" [ 990.423440] env[69992]: _type = "Task" [ 990.423440] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.432049] env[69992]: DEBUG oslo_vmware.api [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2896987, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.488416] env[69992]: DEBUG nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 990.569672] env[69992]: DEBUG oslo_vmware.api [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2896985, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.235524} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.573831] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 990.574223] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 990.574652] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 990.574987] env[69992]: INFO nova.compute.manager [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Took 1.16 seconds to destroy the instance on the hypervisor. [ 990.575843] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 990.576492] env[69992]: DEBUG nova.compute.manager [-] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 990.576800] env[69992]: DEBUG nova.network.neutron [-] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 990.802946] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5260807d-4342-7d92-61ef-477b8359cf2a, 'name': SearchDatastore_Task, 'duration_secs': 0.023228} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.803951] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.804586] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 990.804878] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.805786] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.805786] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 990.805786] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-40d57eaa-f8bc-40d2-b735-33e03163b318 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.818472] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 990.818938] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 990.821408] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d67c640-de1b-4ad5-8e5d-b627bddd42c8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.828134] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 990.828134] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528936aa-d4e2-b2ea-3127-1ccee5793567" [ 990.828134] env[69992]: _type = "Task" [ 990.828134] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.831433] env[69992]: INFO nova.compute.manager [-] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Took 1.27 seconds to deallocate network for instance. [ 990.845618] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528936aa-d4e2-b2ea-3127-1ccee5793567, 'name': SearchDatastore_Task, 'duration_secs': 0.009846} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.849242] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cda258b0-92bd-4ca7-8b5b-29b2c44025a4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.858045] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 990.858045] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52047cca-fc38-9bb2-7ed1-f2b98e2ce0fa" [ 990.858045] env[69992]: _type = "Task" [ 990.858045] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.865106] env[69992]: DEBUG nova.network.neutron [req-13e64182-6672-4673-934e-d6b7d21dd5e1 req-2e6f1d0f-f4e8-4659-aa3a-ce2ce4fce6d6 service nova] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Updated VIF entry in instance network info cache for port c3a06e04-2fae-4c1e-bece-fd85d6e74f50. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 990.865475] env[69992]: DEBUG nova.network.neutron [req-13e64182-6672-4673-934e-d6b7d21dd5e1 req-2e6f1d0f-f4e8-4659-aa3a-ce2ce4fce6d6 service nova] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Updating instance_info_cache with network_info: [{"id": "c3a06e04-2fae-4c1e-bece-fd85d6e74f50", "address": "fa:16:3e:b5:c3:14", "network": {"id": "d897c26f-d462-458f-b9c9-0e2535d7e3f7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-47161419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e2c061f9a6740919869fc68b54d074e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4b6ddb2-2e19-4031-9b22-add90d41a114", "external-id": "nsx-vlan-transportzone-921", "segmentation_id": 921, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3a06e04-2f", "ovs_interfaceid": "c3a06e04-2fae-4c1e-bece-fd85d6e74f50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.873478] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52047cca-fc38-9bb2-7ed1-f2b98e2ce0fa, 'name': SearchDatastore_Task, 'duration_secs': 0.009749} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.873949] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.874057] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] d361769c-bfc2-4c72-83f4-dc9b51f907a3/d361769c-bfc2-4c72-83f4-dc9b51f907a3.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 990.874256] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-46e3c683-696b-49d0-995a-1118e11a18a1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.885532] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 990.885532] env[69992]: value = "task-2896988" [ 990.885532] env[69992]: _type = "Task" [ 990.885532] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.895835] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2896988, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.937912] env[69992]: DEBUG oslo_vmware.api [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2896987, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132993} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.938248] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 990.938460] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 990.938635] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 990.938810] env[69992]: INFO nova.compute.manager [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Took 1.13 seconds to destroy the instance on the hypervisor. [ 990.939114] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 990.939320] env[69992]: DEBUG nova.compute.manager [-] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 990.939413] env[69992]: DEBUG nova.network.neutron [-] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 991.011484] env[69992]: DEBUG oslo_concurrency.lockutils [None req-25578c3b-523f-4f54-aa4e-5f11365cede7 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Acquiring lock "eba81db1-973c-4981-baca-cb98e4087510" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.013352] env[69992]: DEBUG oslo_concurrency.lockutils [None req-25578c3b-523f-4f54-aa4e-5f11365cede7 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Lock "eba81db1-973c-4981-baca-cb98e4087510" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.171634] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b88957d9-c40e-42f5-affa-12b47786c6ca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.181136] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c1bf37-7391-4405-8657-e6c109b7fe91 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.213395] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd60f05-b77b-4468-afac-d29368462290 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.222882] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c47a9b-44c4-46cc-a8dc-d9efdfb79751 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.238468] env[69992]: DEBUG nova.compute.provider_tree [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 991.326298] env[69992]: DEBUG nova.network.neutron [-] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 991.341029] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.369662] env[69992]: DEBUG oslo_concurrency.lockutils [req-13e64182-6672-4673-934e-d6b7d21dd5e1 req-2e6f1d0f-f4e8-4659-aa3a-ce2ce4fce6d6 service nova] Releasing lock "refresh_cache-d361769c-bfc2-4c72-83f4-dc9b51f907a3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 991.370250] env[69992]: DEBUG nova.compute.manager [req-13e64182-6672-4673-934e-d6b7d21dd5e1 req-2e6f1d0f-f4e8-4659-aa3a-ce2ce4fce6d6 service nova] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Received event network-vif-deleted-9f2a98a5-6cca-48ee-84a6-66bf08b7e92f {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 991.370250] env[69992]: INFO nova.compute.manager [req-13e64182-6672-4673-934e-d6b7d21dd5e1 req-2e6f1d0f-f4e8-4659-aa3a-ce2ce4fce6d6 service nova] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Neutron deleted interface 9f2a98a5-6cca-48ee-84a6-66bf08b7e92f; detaching it from the instance and deleting it from the info cache [ 991.370433] env[69992]: DEBUG nova.network.neutron [req-13e64182-6672-4673-934e-d6b7d21dd5e1 req-2e6f1d0f-f4e8-4659-aa3a-ce2ce4fce6d6 service nova] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.396963] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2896988, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485752} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.396963] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] d361769c-bfc2-4c72-83f4-dc9b51f907a3/d361769c-bfc2-4c72-83f4-dc9b51f907a3.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 991.396963] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 991.397132] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d3601dd7-8c6c-401b-880b-52918ed78800 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.407957] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 991.407957] env[69992]: value = "task-2896989" [ 991.407957] env[69992]: _type = "Task" [ 991.407957] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.419226] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2896989, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.498185] env[69992]: DEBUG nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 991.515341] env[69992]: INFO nova.compute.manager [None req-25578c3b-523f-4f54-aa4e-5f11365cede7 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] [instance: eba81db1-973c-4981-baca-cb98e4087510] Detaching volume 794429f7-4ac5-40f0-a80a-f64e8e163e5d [ 991.534075] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 991.537125] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 991.537125] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 991.537125] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 991.537125] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 991.537125] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 991.537366] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 991.537366] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 991.537366] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 991.537366] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 991.537366] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 991.537528] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c807a114-d5ef-4944-9500-64dc5cd18739 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.546447] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f079db-cfe6-4196-b47a-3ed82c49a298 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.569020] env[69992]: INFO nova.virt.block_device [None req-25578c3b-523f-4f54-aa4e-5f11365cede7 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] [instance: eba81db1-973c-4981-baca-cb98e4087510] Attempting to driver detach volume 794429f7-4ac5-40f0-a80a-f64e8e163e5d from mountpoint /dev/sdb [ 991.569020] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-25578c3b-523f-4f54-aa4e-5f11365cede7 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] [instance: eba81db1-973c-4981-baca-cb98e4087510] Volume detach. 
Driver type: vmdk {{(pid=69992) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 991.569020] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-25578c3b-523f-4f54-aa4e-5f11365cede7 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] [instance: eba81db1-973c-4981-baca-cb98e4087510] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581935', 'volume_id': '794429f7-4ac5-40f0-a80a-f64e8e163e5d', 'name': 'volume-794429f7-4ac5-40f0-a80a-f64e8e163e5d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'eba81db1-973c-4981-baca-cb98e4087510', 'attached_at': '', 'detached_at': '', 'volume_id': '794429f7-4ac5-40f0-a80a-f64e8e163e5d', 'serial': '794429f7-4ac5-40f0-a80a-f64e8e163e5d'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 991.569020] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75466bcc-994b-4f53-b5be-93ecfa247d83 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.592885] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc2f835-a957-4a5e-9221-7a87fb6ac348 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.601959] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e981c37-c70a-44b6-a330-64121233a443 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.624463] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96583ed4-72d3-4412-80c4-03c790c3fb4c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.640693] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-25578c3b-523f-4f54-aa4e-5f11365cede7 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] The volume has not been displaced from its original location: [datastore2] volume-794429f7-4ac5-40f0-a80a-f64e8e163e5d/volume-794429f7-4ac5-40f0-a80a-f64e8e163e5d.vmdk. No consolidation needed. 
{{(pid=69992) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 991.646119] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-25578c3b-523f-4f54-aa4e-5f11365cede7 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] [instance: eba81db1-973c-4981-baca-cb98e4087510] Reconfiguring VM instance instance-0000000d to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 991.646846] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4fc9f4b-59ab-4f85-9fd3-3622da9c3ea4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.666454] env[69992]: DEBUG oslo_vmware.api [None req-25578c3b-523f-4f54-aa4e-5f11365cede7 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Waiting for the task: (returnval){ [ 991.666454] env[69992]: value = "task-2896990" [ 991.666454] env[69992]: _type = "Task" [ 991.666454] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.675081] env[69992]: DEBUG oslo_vmware.api [None req-25578c3b-523f-4f54-aa4e-5f11365cede7 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Task: {'id': task-2896990, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.761297] env[69992]: ERROR nova.scheduler.client.report [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [req-dd22d8d9-3ead-4aa6-8f2b-30c4ae85f93c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-dd22d8d9-3ead-4aa6-8f2b-30c4ae85f93c"}]} [ 991.781028] env[69992]: DEBUG nova.scheduler.client.report [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Refreshing inventories for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 991.801402] env[69992]: DEBUG nova.scheduler.client.report [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updating ProviderTree inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 991.801616] env[69992]: DEBUG nova.compute.provider_tree [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 991.822298] env[69992]: DEBUG nova.scheduler.client.report [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Refreshing aggregate associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, aggregates: None {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 991.828244] env[69992]: INFO nova.compute.manager [-] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Took 1.25 seconds to deallocate network for instance. 
[ 991.846277] env[69992]: DEBUG nova.scheduler.client.report [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Refreshing trait associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 991.871570] env[69992]: DEBUG nova.network.neutron [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Successfully updated port: bd5193f4-aa70-4668-af0b-696f84cf0080 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 991.882165] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-112d3e21-77da-4352-83a7-9c31d72e4e57 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.901159] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0355dbbf-31ca-4982-8f0c-ffdd7b5bf1b5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.926324] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2896989, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066425} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.926602] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 991.929322] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4cb2ea6-9df9-4d2a-984a-fa2e9a19b896 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.951013] env[69992]: DEBUG nova.compute.manager [req-13e64182-6672-4673-934e-d6b7d21dd5e1 req-2e6f1d0f-f4e8-4659-aa3a-ce2ce4fce6d6 service nova] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Detach interface failed, port_id=9f2a98a5-6cca-48ee-84a6-66bf08b7e92f, reason: Instance 97cb6372-3f4e-427d-9509-7e6c43aa2e7b could not be found. 
{{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 991.972587] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Reconfiguring VM instance instance-00000025 to attach disk [datastore2] d361769c-bfc2-4c72-83f4-dc9b51f907a3/d361769c-bfc2-4c72-83f4-dc9b51f907a3.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 991.975484] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc0f404c-0c88-4a13-9273-9a52bfb56c40 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.004022] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 992.004022] env[69992]: value = "task-2896991" [ 992.004022] env[69992]: _type = "Task" [ 992.004022] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.016323] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2896991, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.063338] env[69992]: DEBUG nova.compute.manager [req-fb198f17-9375-4b5b-90ac-1ebded1b7efd req-ebb6dc72-5701-45d2-868b-3c8786445a44 service nova] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Received event network-vif-deleted-8b0b0a96-e1ab-4c92-b8d0-af130d30c696 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 992.063338] env[69992]: INFO nova.compute.manager [req-fb198f17-9375-4b5b-90ac-1ebded1b7efd req-ebb6dc72-5701-45d2-868b-3c8786445a44 service nova] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Neutron deleted interface 8b0b0a96-e1ab-4c92-b8d0-af130d30c696; detaching it from the instance and deleting it from the info cache [ 992.063338] env[69992]: DEBUG nova.network.neutron [req-fb198f17-9375-4b5b-90ac-1ebded1b7efd req-ebb6dc72-5701-45d2-868b-3c8786445a44 service nova] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.177232] env[69992]: DEBUG oslo_vmware.api [None req-25578c3b-523f-4f54-aa4e-5f11365cede7 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Task: {'id': task-2896990, 'name': ReconfigVM_Task, 'duration_secs': 0.23689} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.177657] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-25578c3b-523f-4f54-aa4e-5f11365cede7 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] [instance: eba81db1-973c-4981-baca-cb98e4087510] Reconfigured VM instance instance-0000000d to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 992.182603] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a52f726a-47dd-4531-89cb-57e9757f062b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.201850] env[69992]: DEBUG oslo_vmware.api [None req-25578c3b-523f-4f54-aa4e-5f11365cede7 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Waiting for the task: (returnval){ [ 992.201850] env[69992]: value = "task-2896992" [ 992.201850] env[69992]: _type = "Task" [ 992.201850] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.207397] env[69992]: DEBUG nova.compute.manager [req-0a0cd063-b2e4-44a7-abbd-d4af4cdefe09 req-327f2c04-438b-43ab-8a29-095ad3b910bf service nova] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Received event network-vif-deleted-d71a7a39-b1d8-4236-9da5-ebc02ecd90b6 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 992.209468] env[69992]: DEBUG nova.compute.manager [req-0a0cd063-b2e4-44a7-abbd-d4af4cdefe09 req-327f2c04-438b-43ab-8a29-095ad3b910bf service nova] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Received event network-vif-plugged-bd5193f4-aa70-4668-af0b-696f84cf0080 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 992.209468] env[69992]: DEBUG oslo_concurrency.lockutils [req-0a0cd063-b2e4-44a7-abbd-d4af4cdefe09 req-327f2c04-438b-43ab-8a29-095ad3b910bf service nova] Acquiring lock "f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.209468] env[69992]: DEBUG oslo_concurrency.lockutils [req-0a0cd063-b2e4-44a7-abbd-d4af4cdefe09 req-327f2c04-438b-43ab-8a29-095ad3b910bf service nova] Lock "f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.209468] env[69992]: DEBUG oslo_concurrency.lockutils [req-0a0cd063-b2e4-44a7-abbd-d4af4cdefe09 req-327f2c04-438b-43ab-8a29-095ad3b910bf service nova] Lock "f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.209468] env[69992]: DEBUG nova.compute.manager [req-0a0cd063-b2e4-44a7-abbd-d4af4cdefe09 req-327f2c04-438b-43ab-8a29-095ad3b910bf service nova] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] No waiting events found dispatching network-vif-plugged-bd5193f4-aa70-4668-af0b-696f84cf0080 {{(pid=69992) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 992.210265] env[69992]: WARNING nova.compute.manager [req-0a0cd063-b2e4-44a7-abbd-d4af4cdefe09 req-327f2c04-438b-43ab-8a29-095ad3b910bf service nova] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Received unexpected event network-vif-plugged-bd5193f4-aa70-4668-af0b-696f84cf0080 for instance with vm_state building and task_state spawning. [ 992.210265] env[69992]: DEBUG nova.compute.manager [req-0a0cd063-b2e4-44a7-abbd-d4af4cdefe09 req-327f2c04-438b-43ab-8a29-095ad3b910bf service nova] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Received event network-changed-bd5193f4-aa70-4668-af0b-696f84cf0080 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 992.210265] env[69992]: DEBUG nova.compute.manager [req-0a0cd063-b2e4-44a7-abbd-d4af4cdefe09 req-327f2c04-438b-43ab-8a29-095ad3b910bf service nova] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Refreshing instance network info cache due to event network-changed-bd5193f4-aa70-4668-af0b-696f84cf0080. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 992.210265] env[69992]: DEBUG oslo_concurrency.lockutils [req-0a0cd063-b2e4-44a7-abbd-d4af4cdefe09 req-327f2c04-438b-43ab-8a29-095ad3b910bf service nova] Acquiring lock "refresh_cache-f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.210265] env[69992]: DEBUG oslo_concurrency.lockutils [req-0a0cd063-b2e4-44a7-abbd-d4af4cdefe09 req-327f2c04-438b-43ab-8a29-095ad3b910bf service nova] Acquired lock "refresh_cache-f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 992.210483] env[69992]: DEBUG nova.network.neutron [req-0a0cd063-b2e4-44a7-abbd-d4af4cdefe09 req-327f2c04-438b-43ab-8a29-095ad3b910bf service nova] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Refreshing network info cache for port bd5193f4-aa70-4668-af0b-696f84cf0080 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 992.219565] env[69992]: DEBUG oslo_vmware.api [None req-25578c3b-523f-4f54-aa4e-5f11365cede7 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Task: {'id': task-2896992, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.337117] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.375351] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "refresh_cache-f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.386649] env[69992]: DEBUG nova.network.neutron [-] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.511401] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2896991, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.566912] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c59f1c34-3d7c-44a4-85b7-5077090b6d01 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.576234] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-981ee1cf-1130-4ecb-a884-90174321c2d9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.581706] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab4e430-7c30-4a9d-b3fb-ef9a838a9d9d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.595568] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb61988-0280-48f3-9405-226726140b79 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.641469] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7382818e-6162-4853-aa3f-6f86a55a3e8f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.644487] env[69992]: DEBUG nova.compute.manager [req-fb198f17-9375-4b5b-90ac-1ebded1b7efd req-ebb6dc72-5701-45d2-868b-3c8786445a44 service nova] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Detach interface failed, port_id=8b0b0a96-e1ab-4c92-b8d0-af130d30c696, reason: Instance a9274dfc-afbd-419b-a98b-053d71a05d7c could not be found. 
{{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 992.650857] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1bfac38-79d0-4b29-be5c-dbb1d51c8af7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.665723] env[69992]: DEBUG nova.compute.provider_tree [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 992.712409] env[69992]: DEBUG oslo_vmware.api [None req-25578c3b-523f-4f54-aa4e-5f11365cede7 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Task: {'id': task-2896992, 'name': ReconfigVM_Task, 'duration_secs': 0.153619} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.712684] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-25578c3b-523f-4f54-aa4e-5f11365cede7 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] [instance: eba81db1-973c-4981-baca-cb98e4087510] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581935', 'volume_id': '794429f7-4ac5-40f0-a80a-f64e8e163e5d', 'name': 'volume-794429f7-4ac5-40f0-a80a-f64e8e163e5d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'eba81db1-973c-4981-baca-cb98e4087510', 'attached_at': '', 'detached_at': '', 'volume_id': '794429f7-4ac5-40f0-a80a-f64e8e163e5d', 'serial': '794429f7-4ac5-40f0-a80a-f64e8e163e5d'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 992.744734] env[69992]: DEBUG nova.network.neutron [req-0a0cd063-b2e4-44a7-abbd-d4af4cdefe09 req-327f2c04-438b-43ab-8a29-095ad3b910bf service nova] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 992.812170] env[69992]: DEBUG nova.network.neutron [req-0a0cd063-b2e4-44a7-abbd-d4af4cdefe09 req-327f2c04-438b-43ab-8a29-095ad3b910bf service nova] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.889056] env[69992]: INFO nova.compute.manager [-] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Took 1.95 seconds to deallocate network for instance. [ 993.012762] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2896991, 'name': ReconfigVM_Task, 'duration_secs': 0.925294} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.013128] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Reconfigured VM instance instance-00000025 to attach disk [datastore2] d361769c-bfc2-4c72-83f4-dc9b51f907a3/d361769c-bfc2-4c72-83f4-dc9b51f907a3.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 993.013877] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d05d6d7-9a20-447b-9765-a98793706807 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.022605] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 993.022605] env[69992]: value = "task-2896993" [ 993.022605] env[69992]: _type = "Task" [ 993.022605] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.032585] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2896993, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.202180] env[69992]: DEBUG nova.scheduler.client.report [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 67 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 993.202180] env[69992]: DEBUG nova.compute.provider_tree [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 67 to 68 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 993.202180] env[69992]: DEBUG nova.compute.provider_tree [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 993.283721] env[69992]: DEBUG nova.objects.instance [None req-25578c3b-523f-4f54-aa4e-5f11365cede7 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Lazy-loading 'flavor' on Instance uuid eba81db1-973c-4981-baca-cb98e4087510 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 993.315255] env[69992]: DEBUG oslo_concurrency.lockutils [req-0a0cd063-b2e4-44a7-abbd-d4af4cdefe09 req-327f2c04-438b-43ab-8a29-095ad3b910bf service nova] Releasing lock "refresh_cache-f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 993.315928] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquired lock "refresh_cache-f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 993.316124] env[69992]: DEBUG nova.network.neutron [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 993.395681] env[69992]: DEBUG oslo_concurrency.lockutils [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.534988] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2896993, 'name': Rename_Task, 'duration_secs': 0.212496} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.535288] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 993.535561] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa6385ea-6729-4134-97dc-f26b5e932016 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.543634] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 993.543634] env[69992]: value = "task-2896994" [ 993.543634] env[69992]: _type = "Task" [ 993.543634] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.552583] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2896994, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.707087] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.229s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.707798] env[69992]: DEBUG nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 993.710752] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.489s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.711031] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.713140] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.386s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.714556] env[69992]: INFO nova.compute.claims [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 993.735602] env[69992]: INFO nova.scheduler.client.report [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Deleted allocations for instance c205f559-7fe6-4d7e-beba-2fc96b89d705 [ 993.846972] env[69992]: DEBUG nova.network.neutron [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 994.021575] env[69992]: DEBUG nova.network.neutron [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Updating instance_info_cache with network_info: [{"id": "bd5193f4-aa70-4668-af0b-696f84cf0080", "address": "fa:16:3e:1f:9f:eb", "network": {"id": "d897c26f-d462-458f-b9c9-0e2535d7e3f7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-47161419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e2c061f9a6740919869fc68b54d074e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4b6ddb2-2e19-4031-9b22-add90d41a114", "external-id": "nsx-vlan-transportzone-921", "segmentation_id": 921, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd5193f4-aa", "ovs_interfaceid": "bd5193f4-aa70-4668-af0b-696f84cf0080", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.057140] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2896994, 'name': PowerOnVM_Task, 'duration_secs': 0.476524} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.057140] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 994.057495] env[69992]: INFO nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Took 6.56 seconds to spawn the instance on the hypervisor. 
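[editor's note] The spawn path in this stretch is a chain of vCenter tasks (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), each submitted through the oslo.vmware session and then polled until completion, which is what produces the recurring "Waiting for the task … progress is N% … completed successfully" records. A minimal sketch of that submit-and-poll pattern follows; the vCenter host, credentials and vm_ref are placeholders, and Nova's real wrappers live in nova.virt.vmwareapi.vm_util.

    # Sketch: submit a vCenter task via oslo.vmware and block until it finishes.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        "vc.example.org",      # hypothetical vCenter host
        "administrator",       # hypothetical user
        "secret",              # hypothetical password
        api_retry_count=10,
        task_poll_interval=0.5)

    def power_on(vm_ref):
        """Start a VM and wait for the PowerOnVM_Task to complete.

        vm_ref is assumed to be a ManagedObjectReference obtained elsewhere.
        """
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
        # wait_for_task polls every task_poll_interval seconds and raises
        # if vCenter reports the task in an error state.
        session.wait_for_task(task)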
[ 994.058913] env[69992]: DEBUG nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 994.058913] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c5ab18-71dc-4b26-b1c7-95f3bb2d5d92 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.221168] env[69992]: DEBUG nova.compute.utils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 994.222803] env[69992]: DEBUG nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 994.223015] env[69992]: DEBUG nova.network.neutron [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 994.245252] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e82bdd40-10d7-4d74-add8-51270c48645f tempest-DeleteServersAdminTestJSON-90993296 tempest-DeleteServersAdminTestJSON-90993296-project-member] Lock "c205f559-7fe6-4d7e-beba-2fc96b89d705" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.982s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.289999] env[69992]: DEBUG oslo_concurrency.lockutils [None req-25578c3b-523f-4f54-aa4e-5f11365cede7 tempest-VolumesAssistedSnapshotsTest-1612343283 tempest-VolumesAssistedSnapshotsTest-1612343283-project-admin] Lock "eba81db1-973c-4981-baca-cb98e4087510" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.278s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.308227] env[69992]: DEBUG nova.policy [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0dbc40b90a6e4dad8d4977afc0a5c18d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e2c061f9a6740919869fc68b54d074e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 994.524858] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 
tempest-ListServersNegativeTestJSON-1216494605-project-member] Releasing lock "refresh_cache-f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 994.525218] env[69992]: DEBUG nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Instance network_info: |[{"id": "bd5193f4-aa70-4668-af0b-696f84cf0080", "address": "fa:16:3e:1f:9f:eb", "network": {"id": "d897c26f-d462-458f-b9c9-0e2535d7e3f7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-47161419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e2c061f9a6740919869fc68b54d074e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4b6ddb2-2e19-4031-9b22-add90d41a114", "external-id": "nsx-vlan-transportzone-921", "segmentation_id": 921, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd5193f4-aa", "ovs_interfaceid": "bd5193f4-aa70-4668-af0b-696f84cf0080", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 994.525649] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:9f:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a4b6ddb2-2e19-4031-9b22-add90d41a114', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd5193f4-aa70-4668-af0b-696f84cf0080', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 994.534069] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 994.534311] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 994.535014] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb7c5367-dc97-40c5-8815-c3ce2d78050e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.557660] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 994.557660] env[69992]: value = "task-2896996" [ 994.557660] env[69992]: _type = "Task" [ 994.557660] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.574084] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896996, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.581045] env[69992]: INFO nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Took 53.89 seconds to build instance. [ 994.727514] env[69992]: DEBUG nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 994.818645] env[69992]: DEBUG nova.network.neutron [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Successfully created port: 3bb69960-1f9d-420f-957d-a590b9e5bd9d {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 995.071456] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2896996, 'name': CreateVM_Task, 'duration_secs': 0.355876} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.071751] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 995.072205] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.072565] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 995.072672] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 995.072936] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7e595cf-f800-488d-a1a3-a2d25dfdf819 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.078922] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 995.078922] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52b29092-df60-1aaf-32b0-b2bb073f02a2" [ 995.078922] env[69992]: _type = "Task" [ 995.078922] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.084993] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "d361769c-bfc2-4c72-83f4-dc9b51f907a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.167s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 995.092409] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b29092-df60-1aaf-32b0-b2bb073f02a2, 'name': SearchDatastore_Task, 'duration_secs': 0.011179} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.092680] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 995.092901] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 995.093603] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.093603] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 995.093603] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 995.095530] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d676972-0831-46ff-84c3-8e96fdcb9783 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.106902] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 995.106902] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 995.110931] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c50f6be-96b1-43f7-ac67-8d9066a263a7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.115055] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 995.115055] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52f6b33b-8514-b951-7442-b1c61de2e1a2" [ 995.115055] env[69992]: _type = "Task" [ 995.115055] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.123994] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52f6b33b-8514-b951-7442-b1c61de2e1a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.381593] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b817ed68-c991-4d00-8296-da2d1922f36c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.391171] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8586cb1a-5c07-4614-8f3b-6e7670bcdca5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.424954] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53617075-b2ad-4fe1-bf56-6e5028256e27 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.435595] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-648cb78a-fe0a-46c9-bea6-48007f960df7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.452869] env[69992]: DEBUG nova.compute.provider_tree [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.587892] env[69992]: DEBUG nova.compute.manager [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 995.627841] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52f6b33b-8514-b951-7442-b1c61de2e1a2, 'name': SearchDatastore_Task, 'duration_secs': 0.009323} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.629453] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-688135fa-5d41-444f-92d4-718d7ca75c4d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.636599] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 995.636599] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]523cd849-e9e6-eda3-c8c8-fb2e81efb294" [ 995.636599] env[69992]: _type = "Task" [ 995.636599] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.646097] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523cd849-e9e6-eda3-c8c8-fb2e81efb294, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.738861] env[69992]: DEBUG nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 995.766513] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 995.766653] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 995.766812] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 995.767022] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 995.767179] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 995.767337] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 995.767548] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 995.767704] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 995.767872] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 995.768045] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 995.768228] env[69992]: DEBUG nova.virt.hardware [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 995.769115] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b2c053-f206-47b9-adb3-5bc2ab4fad94 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.777414] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56cf0f1a-9861-4d16-93f7-2a252cf6367a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.957048] env[69992]: DEBUG nova.scheduler.client.report [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 996.111897] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.148228] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523cd849-e9e6-eda3-c8c8-fb2e81efb294, 'name': SearchDatastore_Task, 'duration_secs': 0.009726} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.148503] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 996.148766] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75/f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 996.149112] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4305e0f5-f161-4e4b-8eeb-b3681e17ca82 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.157343] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 996.157343] env[69992]: value = "task-2896997" [ 996.157343] env[69992]: _type = "Task" [ 996.157343] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.166355] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2896997, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.461515] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.748s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.461949] env[69992]: DEBUG nova.compute.manager [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 996.466018] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.127s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.467817] env[69992]: INFO nova.compute.claims [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 996.516784] env[69992]: DEBUG nova.network.neutron [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Successfully updated port: 3bb69960-1f9d-420f-957d-a590b9e5bd9d {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 996.554017] env[69992]: DEBUG nova.compute.manager [req-65ac6679-a5fc-4cfc-861c-4e82c03303a4 req-48e29dde-9a36-4ffa-bb47-dd8ea6566699 service nova] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Received event network-vif-plugged-3bb69960-1f9d-420f-957d-a590b9e5bd9d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 996.554017] env[69992]: DEBUG oslo_concurrency.lockutils [req-65ac6679-a5fc-4cfc-861c-4e82c03303a4 req-48e29dde-9a36-4ffa-bb47-dd8ea6566699 service nova] Acquiring lock "40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.554017] env[69992]: DEBUG oslo_concurrency.lockutils [req-65ac6679-a5fc-4cfc-861c-4e82c03303a4 req-48e29dde-9a36-4ffa-bb47-dd8ea6566699 service nova] Lock "40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.554017] env[69992]: DEBUG oslo_concurrency.lockutils [req-65ac6679-a5fc-4cfc-861c-4e82c03303a4 req-48e29dde-9a36-4ffa-bb47-dd8ea6566699 service nova] Lock "40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.554017] env[69992]: DEBUG nova.compute.manager [req-65ac6679-a5fc-4cfc-861c-4e82c03303a4 req-48e29dde-9a36-4ffa-bb47-dd8ea6566699 service nova] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] No waiting events found dispatching network-vif-plugged-3bb69960-1f9d-420f-957d-a590b9e5bd9d {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 996.554478] env[69992]: WARNING nova.compute.manager [req-65ac6679-a5fc-4cfc-861c-4e82c03303a4 req-48e29dde-9a36-4ffa-bb47-dd8ea6566699 service nova] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Received unexpected event network-vif-plugged-3bb69960-1f9d-420f-957d-a590b9e5bd9d for instance with vm_state building and task_state spawning. 
[ 996.670286] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2896997, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.467681} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.670630] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75/f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 996.670912] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 996.671258] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e41b0acc-fe6c-4f43-89e9-7cb84cc1607b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.680323] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 996.680323] env[69992]: value = "task-2896998" [ 996.680323] env[69992]: _type = "Task" [ 996.680323] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.689917] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2896998, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.897318] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Acquiring lock "7932a42f-6a62-4c2c-be9a-3cb518fe4183" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.897551] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Lock "7932a42f-6a62-4c2c-be9a-3cb518fe4183" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.978020] env[69992]: DEBUG nova.compute.utils [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 996.981079] env[69992]: DEBUG nova.compute.manager [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 996.981316] env[69992]: DEBUG nova.network.neutron [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 997.028620] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "refresh_cache-40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.028620] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquired lock "refresh_cache-40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.028620] env[69992]: DEBUG nova.network.neutron [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 997.059614] env[69992]: DEBUG nova.policy [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 
'7ff51477d6894eec85274a6803184852', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b7bf7c044664b77aee07f763794eb67', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 997.190712] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2896998, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07158} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.190976] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 997.191878] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-030a682e-443d-4ce1-ad3e-0ec6bd51d2ca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.215096] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75/f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 997.215606] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66d29042-bfb4-4e2f-9e01-b74c66d059bb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.240014] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 997.240014] env[69992]: value = "task-2896999" [ 997.240014] env[69992]: _type = "Task" [ 997.240014] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.250222] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2896999, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.448604] env[69992]: DEBUG nova.network.neutron [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Successfully created port: bf5a36db-df8e-4dd4-9248-fdb5f256bc7b {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 997.482481] env[69992]: DEBUG nova.compute.manager [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 997.560217] env[69992]: DEBUG nova.network.neutron [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 997.753250] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2896999, 'name': ReconfigVM_Task, 'duration_secs': 0.279079} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.754454] env[69992]: DEBUG nova.network.neutron [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Updating instance_info_cache with network_info: [{"id": "3bb69960-1f9d-420f-957d-a590b9e5bd9d", "address": "fa:16:3e:2f:9a:7b", "network": {"id": "d897c26f-d462-458f-b9c9-0e2535d7e3f7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-47161419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e2c061f9a6740919869fc68b54d074e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4b6ddb2-2e19-4031-9b22-add90d41a114", "external-id": "nsx-vlan-transportzone-921", "segmentation_id": 921, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bb69960-1f", "ovs_interfaceid": "3bb69960-1f9d-420f-957d-a590b9e5bd9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.755633] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Reconfigured VM 
instance instance-00000026 to attach disk [datastore2] f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75/f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 997.758861] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c203910b-016d-4912-8d60-f716397ed148 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.770652] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 997.770652] env[69992]: value = "task-2897000" [ 997.770652] env[69992]: _type = "Task" [ 997.770652] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.786737] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897000, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.839395] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Acquiring lock "eba81db1-973c-4981-baca-cb98e4087510" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.839624] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Lock "eba81db1-973c-4981-baca-cb98e4087510" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.839869] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Acquiring lock "eba81db1-973c-4981-baca-cb98e4087510-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.840079] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Lock "eba81db1-973c-4981-baca-cb98e4087510-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.840254] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Lock "eba81db1-973c-4981-baca-cb98e4087510-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.842781] env[69992]: INFO nova.compute.manager [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Terminating instance [ 997.990698] env[69992]: INFO nova.virt.block_device [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Booting with volume a203e79e-9126-47e8-96d7-9c0a57c68179 at /dev/sda [ 998.033535] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b8e91a1c-0272-4c2e-ad8a-bd9ccb2bc55e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.049739] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a151ce15-55c4-40d3-a754-5988f05edde5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.091742] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-63d7e4a0-5e0e-4f67-92a5-f4dc72c93175 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.094627] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8606df3-5040-47ee-aca0-a3e23b9997ef {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.103418] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f800349b-604d-4bd7-bda9-ce659cd49b62 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.108872] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fad0783-5eb5-4de9-abbb-2d1b160c7343 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.166228] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a473433-b0ed-4b6f-bb0e-d9065e8bece6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.169477] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c571f2b6-36e9-444e-bc52-90936a8e2a94 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.178497] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91781065-4f37-4b0f-9c87-e34693b9c59c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.181799] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4ac4302-3c3c-408a-a565-9bb0e4f2ed96 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.196932] env[69992]: DEBUG 
nova.compute.provider_tree [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 998.201981] env[69992]: DEBUG nova.virt.block_device [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Updating existing volume attachment record: 2360acb9-32a2-45f3-a087-37846ff7f5a1 {{(pid=69992) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 998.260098] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Releasing lock "refresh_cache-40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 998.260460] env[69992]: DEBUG nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Instance network_info: |[{"id": "3bb69960-1f9d-420f-957d-a590b9e5bd9d", "address": "fa:16:3e:2f:9a:7b", "network": {"id": "d897c26f-d462-458f-b9c9-0e2535d7e3f7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-47161419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e2c061f9a6740919869fc68b54d074e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4b6ddb2-2e19-4031-9b22-add90d41a114", "external-id": "nsx-vlan-transportzone-921", "segmentation_id": 921, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bb69960-1f", "ovs_interfaceid": "3bb69960-1f9d-420f-957d-a590b9e5bd9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 998.260877] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:9a:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a4b6ddb2-2e19-4031-9b22-add90d41a114', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3bb69960-1f9d-420f-957d-a590b9e5bd9d', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 998.269898] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 998.270155] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 998.270394] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-25fdc033-e186-43c9-a75f-6d3a3921c4f9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.294660] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897000, 'name': Rename_Task, 'duration_secs': 0.320341} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.295990] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 998.296244] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 998.296244] env[69992]: value = "task-2897001" [ 998.296244] env[69992]: _type = "Task" [ 998.296244] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.296427] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4dba2c05-8625-4a2e-800f-e7d80f4860fe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.309025] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897001, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.309025] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 998.309025] env[69992]: value = "task-2897002" [ 998.309025] env[69992]: _type = "Task" [ 998.309025] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.318084] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897002, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.346231] env[69992]: DEBUG nova.compute.manager [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 998.346507] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 998.347479] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c069dbf7-1201-463e-b3bc-2eec473c5675 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.356627] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 998.356918] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a481bef-7ec6-4176-b7ab-d08db0eec1b8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.364855] env[69992]: DEBUG oslo_vmware.api [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Waiting for the task: (returnval){ [ 998.364855] env[69992]: value = "task-2897003" [ 998.364855] env[69992]: _type = "Task" [ 998.364855] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.376122] env[69992]: DEBUG oslo_vmware.api [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Task: {'id': task-2897003, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.575785] env[69992]: DEBUG nova.compute.manager [req-691ca3ee-6b02-49ed-b75e-7bfe39547ae8 req-96d1efd8-8b2e-4b66-9822-c50d35dc845f service nova] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Received event network-changed-3bb69960-1f9d-420f-957d-a590b9e5bd9d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 998.575785] env[69992]: DEBUG nova.compute.manager [req-691ca3ee-6b02-49ed-b75e-7bfe39547ae8 req-96d1efd8-8b2e-4b66-9822-c50d35dc845f service nova] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Refreshing instance network info cache due to event network-changed-3bb69960-1f9d-420f-957d-a590b9e5bd9d. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 998.575959] env[69992]: DEBUG oslo_concurrency.lockutils [req-691ca3ee-6b02-49ed-b75e-7bfe39547ae8 req-96d1efd8-8b2e-4b66-9822-c50d35dc845f service nova] Acquiring lock "refresh_cache-40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.576114] env[69992]: DEBUG oslo_concurrency.lockutils [req-691ca3ee-6b02-49ed-b75e-7bfe39547ae8 req-96d1efd8-8b2e-4b66-9822-c50d35dc845f service nova] Acquired lock "refresh_cache-40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 998.576247] env[69992]: DEBUG nova.network.neutron [req-691ca3ee-6b02-49ed-b75e-7bfe39547ae8 req-96d1efd8-8b2e-4b66-9822-c50d35dc845f service nova] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Refreshing network info cache for port 3bb69960-1f9d-420f-957d-a590b9e5bd9d {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 998.725815] env[69992]: ERROR nova.scheduler.client.report [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [req-0242e1d4-0cba-49fd-b917-f3fbe0d3ae8c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0242e1d4-0cba-49fd-b917-f3fbe0d3ae8c"}]} [ 998.745141] env[69992]: DEBUG nova.scheduler.client.report [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Refreshing inventories for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 998.763213] env[69992]: DEBUG nova.scheduler.client.report [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Updating ProviderTree inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 998.763468] env[69992]: DEBUG nova.compute.provider_tree [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 998.779532] env[69992]: DEBUG nova.scheduler.client.report [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Refreshing aggregate associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, aggregates: None {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 998.803949] env[69992]: DEBUG nova.scheduler.client.report [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Refreshing trait associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 998.813476] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897001, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.822846] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897002, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.879169] env[69992]: DEBUG oslo_vmware.api [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Task: {'id': task-2897003, 'name': PowerOffVM_Task, 'duration_secs': 0.196297} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.879517] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 998.879727] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 998.880052] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-371aa005-c069-4d80-bd8c-735b98ff51ec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.995630] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 998.995957] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 998.996041] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Deleting the datastore file [datastore2] eba81db1-973c-4981-baca-cb98e4087510 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 998.996312] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d272df81-e853-4775-ad37-e180183cd780 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.004471] env[69992]: DEBUG oslo_vmware.api [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Waiting for the task: (returnval){ [ 
999.004471] env[69992]: value = "task-2897005" [ 999.004471] env[69992]: _type = "Task" [ 999.004471] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.018402] env[69992]: DEBUG oslo_vmware.api [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Task: {'id': task-2897005, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.285308] env[69992]: DEBUG nova.network.neutron [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Successfully updated port: bf5a36db-df8e-4dd4-9248-fdb5f256bc7b {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 999.316880] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897001, 'name': CreateVM_Task, 'duration_secs': 0.70403} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.318760] env[69992]: DEBUG nova.network.neutron [req-691ca3ee-6b02-49ed-b75e-7bfe39547ae8 req-96d1efd8-8b2e-4b66-9822-c50d35dc845f service nova] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Updated VIF entry in instance network info cache for port 3bb69960-1f9d-420f-957d-a590b9e5bd9d. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 999.319226] env[69992]: DEBUG nova.network.neutron [req-691ca3ee-6b02-49ed-b75e-7bfe39547ae8 req-96d1efd8-8b2e-4b66-9822-c50d35dc845f service nova] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Updating instance_info_cache with network_info: [{"id": "3bb69960-1f9d-420f-957d-a590b9e5bd9d", "address": "fa:16:3e:2f:9a:7b", "network": {"id": "d897c26f-d462-458f-b9c9-0e2535d7e3f7", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-47161419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e2c061f9a6740919869fc68b54d074e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4b6ddb2-2e19-4031-9b22-add90d41a114", "external-id": "nsx-vlan-transportzone-921", "segmentation_id": 921, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bb69960-1f", "ovs_interfaceid": "3bb69960-1f9d-420f-957d-a590b9e5bd9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.324129] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 999.325087] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 
tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.326475] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 999.327914] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 999.332141] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dec6fce8-bc3e-47d4-b9b3-65690a1f312c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.340115] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897002, 'name': PowerOnVM_Task, 'duration_secs': 0.649422} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.341742] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 999.342199] env[69992]: INFO nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Took 7.84 seconds to spawn the instance on the hypervisor. [ 999.342492] env[69992]: DEBUG nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 999.343207] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 999.343207] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d3b540-88fc-e757-d3b3-34685ae81fe8" [ 999.343207] env[69992]: _type = "Task" [ 999.343207] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.344165] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf92e25-4db9-4bd0-9c09-96cd41adaeb6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.364587] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d3b540-88fc-e757-d3b3-34685ae81fe8, 'name': SearchDatastore_Task, 'duration_secs': 0.018268} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.365293] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 999.365659] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 999.365994] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.366375] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 999.366733] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 999.369924] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-842f5806-e63c-4d05-89d6-995c963a6c34 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.384327] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 999.384327] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 999.384327] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9500ecc-57e3-47bd-9bd1-036eb98f377e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.394748] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 999.394748] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52f79200-5add-8550-8488-1006259a9f69" [ 999.394748] env[69992]: _type = "Task" [ 999.394748] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.405521] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52f79200-5add-8550-8488-1006259a9f69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.460893] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d523754a-7cba-4df0-99ef-970f259ce19a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.469368] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b75f24a-2928-4892-aecc-10348ba04649 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.499590] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10476802-cac5-4bf8-95f6-c054a13fb596 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.510651] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e991247e-8015-45c0-9c5c-7d16fcff0c74 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.519604] env[69992]: DEBUG oslo_vmware.api [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Task: {'id': task-2897005, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150516} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.528420] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 999.528633] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 999.528855] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 999.528973] env[69992]: INFO nova.compute.manager [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] [instance: eba81db1-973c-4981-baca-cb98e4087510] Took 1.18 seconds to destroy the instance on the hypervisor. [ 999.529249] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 999.529657] env[69992]: DEBUG nova.compute.provider_tree [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 999.531025] env[69992]: DEBUG nova.compute.manager [-] [instance: eba81db1-973c-4981-baca-cb98e4087510] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 999.531168] env[69992]: DEBUG nova.network.neutron [-] [instance: eba81db1-973c-4981-baca-cb98e4087510] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 999.787321] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Acquiring lock "refresh_cache-fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.787321] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Acquired lock "refresh_cache-fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 999.787321] env[69992]: DEBUG nova.network.neutron [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 999.825227] env[69992]: DEBUG oslo_concurrency.lockutils [req-691ca3ee-6b02-49ed-b75e-7bfe39547ae8 req-96d1efd8-8b2e-4b66-9822-c50d35dc845f service nova] Releasing lock "refresh_cache-40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 999.873774] env[69992]: INFO nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Took 53.85 seconds to build instance. [ 999.906855] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52f79200-5add-8550-8488-1006259a9f69, 'name': SearchDatastore_Task, 'duration_secs': 0.023324} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.907860] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2b0232c-b2ba-485b-9fba-867c9ae55e13 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.914663] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 999.914663] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]526b78c0-6442-4680-e678-387f5364697f" [ 999.914663] env[69992]: _type = "Task" [ 999.914663] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.923513] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526b78c0-6442-4680-e678-387f5364697f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.079749] env[69992]: DEBUG nova.scheduler.client.report [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 69 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1000.080235] env[69992]: DEBUG nova.compute.provider_tree [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 69 to 70 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1000.080562] env[69992]: DEBUG nova.compute.provider_tree [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1000.311273] env[69992]: DEBUG nova.compute.manager [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 
tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1000.311826] env[69992]: DEBUG nova.virt.hardware [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1000.312056] env[69992]: DEBUG nova.virt.hardware [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1000.312674] env[69992]: DEBUG nova.virt.hardware [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1000.312897] env[69992]: DEBUG nova.virt.hardware [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1000.314776] env[69992]: DEBUG nova.virt.hardware [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1000.314776] env[69992]: DEBUG nova.virt.hardware [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1000.314776] env[69992]: DEBUG nova.virt.hardware [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1000.314776] env[69992]: DEBUG nova.virt.hardware [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1000.314776] env[69992]: DEBUG 
nova.virt.hardware [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1000.315093] env[69992]: DEBUG nova.virt.hardware [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1000.315093] env[69992]: DEBUG nova.virt.hardware [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1000.316188] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42d12d6-7e24-4521-9055-3643ecb3afa0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.326324] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-055ca9ef-9c83-41c5-80e3-3d523d29fecc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.331083] env[69992]: DEBUG nova.network.neutron [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1000.350753] env[69992]: DEBUG nova.compute.manager [req-13ec38dd-3b08-4074-ba7e-94d81f8cfa7d req-59d1f8d2-1b17-454a-842e-cf7ec72bf380 service nova] [instance: eba81db1-973c-4981-baca-cb98e4087510] Received event network-vif-deleted-54980674-0d82-4eac-8cb8-3d49bf81e6f0 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1000.350960] env[69992]: INFO nova.compute.manager [req-13ec38dd-3b08-4074-ba7e-94d81f8cfa7d req-59d1f8d2-1b17-454a-842e-cf7ec72bf380 service nova] [instance: eba81db1-973c-4981-baca-cb98e4087510] Neutron deleted interface 54980674-0d82-4eac-8cb8-3d49bf81e6f0; detaching it from the instance and deleting it from the info cache [ 1000.351115] env[69992]: DEBUG nova.network.neutron [req-13ec38dd-3b08-4074-ba7e-94d81f8cfa7d req-59d1f8d2-1b17-454a-842e-cf7ec72bf380 service nova] [instance: eba81db1-973c-4981-baca-cb98e4087510] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.375526] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.426s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.426725] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526b78c0-6442-4680-e678-387f5364697f, 'name': SearchDatastore_Task, 'duration_secs': 0.040348} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.427224] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.427547] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4/40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1000.427817] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b31d12a8-ad5a-4f8d-af43-88e27d00615e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.436534] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 1000.436534] env[69992]: value = "task-2897006" [ 1000.436534] env[69992]: _type = "Task" [ 1000.436534] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.444649] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897006, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.488921] env[69992]: DEBUG nova.network.neutron [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Updating instance_info_cache with network_info: [{"id": "bf5a36db-df8e-4dd4-9248-fdb5f256bc7b", "address": "fa:16:3e:23:07:fd", "network": {"id": "3823045c-df09-4c89-af5a-8eb2f8178ac8", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1085695135-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b7bf7c044664b77aee07f763794eb67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf5a36db-df", "ovs_interfaceid": "bf5a36db-df8e-4dd4-9248-fdb5f256bc7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.588031] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.122s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.588610] env[69992]: DEBUG nova.compute.manager [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1000.591563] env[69992]: DEBUG oslo_concurrency.lockutils [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.966s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.593054] env[69992]: INFO nova.compute.claims [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1000.607599] env[69992]: DEBUG nova.compute.manager [req-49da41fd-388d-4348-aabb-8734619d1483 req-1e084c65-16e7-4097-9ebf-2cb215b85012 service nova] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Received event network-vif-plugged-bf5a36db-df8e-4dd4-9248-fdb5f256bc7b {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1000.607898] env[69992]: DEBUG oslo_concurrency.lockutils [req-49da41fd-388d-4348-aabb-8734619d1483 req-1e084c65-16e7-4097-9ebf-2cb215b85012 service nova] Acquiring lock "fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.608157] env[69992]: DEBUG oslo_concurrency.lockutils [req-49da41fd-388d-4348-aabb-8734619d1483 req-1e084c65-16e7-4097-9ebf-2cb215b85012 service nova] Lock "fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.608388] env[69992]: DEBUG oslo_concurrency.lockutils [req-49da41fd-388d-4348-aabb-8734619d1483 req-1e084c65-16e7-4097-9ebf-2cb215b85012 service nova] Lock "fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.608600] env[69992]: DEBUG nova.compute.manager [req-49da41fd-388d-4348-aabb-8734619d1483 req-1e084c65-16e7-4097-9ebf-2cb215b85012 service nova] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] No waiting events found dispatching network-vif-plugged-bf5a36db-df8e-4dd4-9248-fdb5f256bc7b {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1000.608795] env[69992]: WARNING nova.compute.manager [req-49da41fd-388d-4348-aabb-8734619d1483 req-1e084c65-16e7-4097-9ebf-2cb215b85012 service nova] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Received unexpected event network-vif-plugged-bf5a36db-df8e-4dd4-9248-fdb5f256bc7b for instance with vm_state building and task_state spawning. 
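The 409 returned by Placement at the top of this section (error code "placement.concurrent_update", detail "resource provider generation conflict") together with the later update of provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 "generation from 69 to 70 during operation: update_inventory" shows Placement's optimistic-concurrency scheme: every inventory write must carry the provider generation it was read against, and a stale generation is rejected so the client refreshes its view and retries, which is exactly the _refresh_and_get_inventory / set_inventory_for_provider sequence logged above. A minimal illustrative sketch of that read-and-retry loop against the Placement REST API follows; the endpoint URL, token, microversion header, and function name are assumptions for the example, not values from this run or Nova's actual report-client code.

# Illustrative sketch only: retry an inventory PUT against the Placement API when the
# resource provider generation is stale (HTTP 409, code "placement.concurrent_update").
# PLACEMENT_URL, the token, and the provider UUID are placeholders, not from this log.
import requests

PLACEMENT_URL = "http://placement.example/placement"          # assumed endpoint
HEADERS = {
    "X-Auth-Token": "TOKEN",                                   # assumed credential
    "OpenStack-API-Version": "placement 1.26",                 # assumed microversion
}

def set_inventories(provider_uuid, inventories, max_attempts=4):
    url = f"{PLACEMENT_URL}/resource_providers/{provider_uuid}/inventories"
    for _ in range(max_attempts):
        # Re-read the provider's current generation before every write attempt.
        current = requests.get(url, headers=HEADERS)
        current.raise_for_status()
        payload = {
            "resource_provider_generation": current.json()["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = requests.put(url, json=payload, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 with "placement.concurrent_update": another writer bumped the
        # generation (e.g. 69 -> 70 above); loop to refresh and retry.
    raise RuntimeError("gave up after repeated resource provider generation conflicts")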
[ 1000.608984] env[69992]: DEBUG nova.compute.manager [req-49da41fd-388d-4348-aabb-8734619d1483 req-1e084c65-16e7-4097-9ebf-2cb215b85012 service nova] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Received event network-changed-bf5a36db-df8e-4dd4-9248-fdb5f256bc7b {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1000.609221] env[69992]: DEBUG nova.compute.manager [req-49da41fd-388d-4348-aabb-8734619d1483 req-1e084c65-16e7-4097-9ebf-2cb215b85012 service nova] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Refreshing instance network info cache due to event network-changed-bf5a36db-df8e-4dd4-9248-fdb5f256bc7b. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1000.609442] env[69992]: DEBUG oslo_concurrency.lockutils [req-49da41fd-388d-4348-aabb-8734619d1483 req-1e084c65-16e7-4097-9ebf-2cb215b85012 service nova] Acquiring lock "refresh_cache-fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.777227] env[69992]: DEBUG nova.network.neutron [-] [instance: eba81db1-973c-4981-baca-cb98e4087510] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.853525] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3cc73616-3222-4d16-b525-5b9b2010db14 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.869432] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd442ef-310d-4036-94bd-5f0fc3bbb83f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.882859] env[69992]: DEBUG nova.compute.manager [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1000.919733] env[69992]: DEBUG nova.compute.manager [req-13ec38dd-3b08-4074-ba7e-94d81f8cfa7d req-59d1f8d2-1b17-454a-842e-cf7ec72bf380 service nova] [instance: eba81db1-973c-4981-baca-cb98e4087510] Detach interface failed, port_id=54980674-0d82-4eac-8cb8-3d49bf81e6f0, reason: Instance eba81db1-973c-4981-baca-cb98e4087510 could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1000.947047] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897006, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464026} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.947254] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4/40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1000.948026] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1000.948026] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2385618a-ebc1-4cf3-8807-457fa79ad54f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.956034] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 1000.956034] env[69992]: value = "task-2897007" [ 1000.956034] env[69992]: _type = "Task" [ 1000.956034] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.965586] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897007, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.993357] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Releasing lock "refresh_cache-fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.993699] env[69992]: DEBUG nova.compute.manager [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Instance network_info: |[{"id": "bf5a36db-df8e-4dd4-9248-fdb5f256bc7b", "address": "fa:16:3e:23:07:fd", "network": {"id": "3823045c-df09-4c89-af5a-8eb2f8178ac8", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1085695135-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b7bf7c044664b77aee07f763794eb67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf5a36db-df", "ovs_interfaceid": "bf5a36db-df8e-4dd4-9248-fdb5f256bc7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1000.994445] env[69992]: DEBUG oslo_concurrency.lockutils [req-49da41fd-388d-4348-aabb-8734619d1483 req-1e084c65-16e7-4097-9ebf-2cb215b85012 service nova] Acquired lock "refresh_cache-fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1000.994644] env[69992]: DEBUG nova.network.neutron [req-49da41fd-388d-4348-aabb-8734619d1483 req-1e084c65-16e7-4097-9ebf-2cb215b85012 service nova] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Refreshing network info cache for port bf5a36db-df8e-4dd4-9248-fdb5f256bc7b {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1000.995930] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:07:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bf5a36db-df8e-4dd4-9248-fdb5f256bc7b', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1001.003677] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 
tempest-ServerActionsV293TestJSON-1685366128-project-member] Creating folder: Project (1b7bf7c044664b77aee07f763794eb67). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1001.004712] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-39f1cf12-5f47-470b-a9c1-ea75ad4d2b6c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.021472] env[69992]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1001.021574] env[69992]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=69992) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1001.022188] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Folder already exists: Project (1b7bf7c044664b77aee07f763794eb67). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1001.022412] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Creating folder: Instances. Parent ref: group-v581879. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1001.022664] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65effcd9-51fc-4289-b78e-442a56f2c8a9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.035766] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Created folder: Instances in parent group-v581879. [ 1001.036088] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1001.036361] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1001.036577] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea25ff60-94e3-4066-b523-80b6d20ac067 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.060213] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1001.060213] env[69992]: value = "task-2897010" [ 1001.060213] env[69992]: _type = "Task" [ 1001.060213] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.069372] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897010, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.097546] env[69992]: DEBUG nova.compute.utils [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1001.100943] env[69992]: DEBUG nova.compute.manager [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1001.101121] env[69992]: DEBUG nova.network.neutron [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1001.159244] env[69992]: DEBUG nova.policy [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa75de4804b34977a2ab2f082b57c4dd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef82945e1f93479ea4a19fbe1855870b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1001.280367] env[69992]: INFO nova.compute.manager [-] [instance: eba81db1-973c-4981-baca-cb98e4087510] Took 1.75 seconds to deallocate network for instance. [ 1001.412110] env[69992]: DEBUG oslo_concurrency.lockutils [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.454755] env[69992]: DEBUG nova.network.neutron [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Successfully created port: abab8d85-8633-4722-85d1-b21be464919d {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1001.467027] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897007, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.173744} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.467310] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1001.468152] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fd64985-f8a3-4b0f-942e-47c9484714f2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.491485] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4/40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1001.491751] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d20fbea8-46d2-4530-a64a-c64ff4bdd088 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.516754] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 1001.516754] env[69992]: value = "task-2897011" [ 1001.516754] env[69992]: _type = "Task" [ 1001.516754] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.525802] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897011, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.570854] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897010, 'name': CreateVM_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.602185] env[69992]: DEBUG nova.compute.manager [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1001.789564] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.888880] env[69992]: DEBUG nova.network.neutron [req-49da41fd-388d-4348-aabb-8734619d1483 req-1e084c65-16e7-4097-9ebf-2cb215b85012 service nova] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Updated VIF entry in instance network info cache for port bf5a36db-df8e-4dd4-9248-fdb5f256bc7b. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1001.889261] env[69992]: DEBUG nova.network.neutron [req-49da41fd-388d-4348-aabb-8734619d1483 req-1e084c65-16e7-4097-9ebf-2cb215b85012 service nova] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Updating instance_info_cache with network_info: [{"id": "bf5a36db-df8e-4dd4-9248-fdb5f256bc7b", "address": "fa:16:3e:23:07:fd", "network": {"id": "3823045c-df09-4c89-af5a-8eb2f8178ac8", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1085695135-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b7bf7c044664b77aee07f763794eb67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf5a36db-df", "ovs_interfaceid": "bf5a36db-df8e-4dd4-9248-fdb5f256bc7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.031396] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897011, 'name': ReconfigVM_Task, 'duration_secs': 0.333996} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.031706] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Reconfigured VM instance instance-00000027 to attach disk [datastore2] 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4/40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1002.032679] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6d52caac-397a-4c80-80a3-077d4c7a2245 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.040886] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 1002.040886] env[69992]: value = "task-2897012" [ 1002.040886] env[69992]: _type = "Task" [ 1002.040886] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.049742] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897012, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.075518] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897010, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.266398] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac05263-811c-4152-aa96-74c7dfafb949 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.275343] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bb54c95-1ab2-4f89-a636-78feaf3cf212 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.310333] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed210bfe-a206-4f64-bea3-bf08ca35d296 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.319033] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7547ed-072d-48dc-85a8-186a78acf268 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.333642] env[69992]: DEBUG nova.compute.provider_tree [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1002.392243] env[69992]: DEBUG oslo_concurrency.lockutils [req-49da41fd-388d-4348-aabb-8734619d1483 req-1e084c65-16e7-4097-9ebf-2cb215b85012 service nova] Releasing lock "refresh_cache-fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1002.552419] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897012, 'name': Rename_Task, 'duration_secs': 0.163847} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.552735] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1002.553016] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-be6f4d3e-13ee-4907-979f-f74c463611ea {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.560659] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 1002.560659] env[69992]: value = "task-2897013" [ 1002.560659] env[69992]: _type = "Task" [ 1002.560659] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.571923] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897013, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.575979] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897010, 'name': CreateVM_Task, 'duration_secs': 1.407594} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.576182] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1002.576861] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'boot_index': 0, 'disk_bus': None, 'device_type': None, 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581892', 'volume_id': 'a203e79e-9126-47e8-96d7-9c0a57c68179', 'name': 'volume-a203e79e-9126-47e8-96d7-9c0a57c68179', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2', 'attached_at': '', 'detached_at': '', 'volume_id': 'a203e79e-9126-47e8-96d7-9c0a57c68179', 'serial': 'a203e79e-9126-47e8-96d7-9c0a57c68179'}, 'attachment_id': '2360acb9-32a2-45f3-a087-37846ff7f5a1', 'delete_on_termination': True, 'mount_device': '/dev/sda', 'volume_type': None}], 'swap': None} {{(pid=69992) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1002.577120] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Root 
volume attach. Driver type: vmdk {{(pid=69992) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1002.577944] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fbac443-a3c3-466b-8d8e-0b5c203debd8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.588184] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8afbe3-eb47-446f-9473-85f57518e3af {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.594712] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dcfdb23-0f44-4982-9aab-3133a8822c8b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.601787] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-43b1a547-3641-4dfc-919a-a68c15dc91e6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.609729] env[69992]: DEBUG oslo_vmware.api [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Waiting for the task: (returnval){ [ 1002.609729] env[69992]: value = "task-2897014" [ 1002.609729] env[69992]: _type = "Task" [ 1002.609729] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.616020] env[69992]: DEBUG nova.compute.manager [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1002.621084] env[69992]: DEBUG oslo_vmware.api [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Task: {'id': task-2897014, 'name': RelocateVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.652737] env[69992]: DEBUG nova.virt.hardware [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1002.653029] env[69992]: DEBUG nova.virt.hardware [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1002.653260] env[69992]: DEBUG nova.virt.hardware [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1002.653482] env[69992]: DEBUG nova.virt.hardware [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1002.653722] env[69992]: DEBUG nova.virt.hardware [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1002.653925] env[69992]: DEBUG nova.virt.hardware [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1002.654212] env[69992]: DEBUG nova.virt.hardware [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1002.654562] env[69992]: DEBUG nova.virt.hardware [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1002.654633] env[69992]: DEBUG nova.virt.hardware [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1002.654850] env[69992]: DEBUG nova.virt.hardware [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1002.655057] env[69992]: DEBUG nova.virt.hardware [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1002.655934] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f5f3941-09ac-441b-b1bf-ea367430938c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.664527] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0efdb0da-6443-4b65-9f2d-3f0c91edbd0f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.880880] env[69992]: DEBUG nova.scheduler.client.report [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 70 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1002.881507] env[69992]: DEBUG nova.compute.provider_tree [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 70 to 71 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1002.881507] env[69992]: DEBUG nova.compute.provider_tree [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1002.979822] env[69992]: DEBUG nova.network.neutron [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Successfully updated port: abab8d85-8633-4722-85d1-b21be464919d {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1002.987280] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Acquiring lock "e0b5ad16-f631-444c-a189-167e34574316" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.987280] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Lock "e0b5ad16-f631-444c-a189-167e34574316" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.987411] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Acquiring lock "e0b5ad16-f631-444c-a189-167e34574316-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.987650] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Lock "e0b5ad16-f631-444c-a189-167e34574316-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.987723] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Lock "e0b5ad16-f631-444c-a189-167e34574316-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.990297] env[69992]: INFO nova.compute.manager [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Terminating instance [ 1003.024465] env[69992]: DEBUG nova.compute.manager [req-616470e0-cd60-4d1a-ada9-219b66189d47 req-c91803bc-8b92-4b10-9e81-18771e7e80f8 service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Received event network-vif-plugged-abab8d85-8633-4722-85d1-b21be464919d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1003.024778] env[69992]: DEBUG oslo_concurrency.lockutils [req-616470e0-cd60-4d1a-ada9-219b66189d47 req-c91803bc-8b92-4b10-9e81-18771e7e80f8 service nova] Acquiring lock "30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.025060] env[69992]: DEBUG oslo_concurrency.lockutils [req-616470e0-cd60-4d1a-ada9-219b66189d47 req-c91803bc-8b92-4b10-9e81-18771e7e80f8 service nova] Lock "30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.025261] env[69992]: DEBUG oslo_concurrency.lockutils [req-616470e0-cd60-4d1a-ada9-219b66189d47 req-c91803bc-8b92-4b10-9e81-18771e7e80f8 service nova] Lock "30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.025477] env[69992]: DEBUG nova.compute.manager [req-616470e0-cd60-4d1a-ada9-219b66189d47 req-c91803bc-8b92-4b10-9e81-18771e7e80f8 service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] No waiting events found dispatching network-vif-plugged-abab8d85-8633-4722-85d1-b21be464919d {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1003.025683] env[69992]: WARNING nova.compute.manager [req-616470e0-cd60-4d1a-ada9-219b66189d47 req-c91803bc-8b92-4b10-9e81-18771e7e80f8 service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Received unexpected event network-vif-plugged-abab8d85-8633-4722-85d1-b21be464919d for instance with vm_state building and task_state spawning. [ 1003.072551] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897013, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.119466] env[69992]: DEBUG oslo_vmware.api [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Task: {'id': task-2897014, 'name': RelocateVM_Task, 'duration_secs': 0.483661} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.119655] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Volume attach. 
Driver type: vmdk {{(pid=69992) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1003.119862] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581892', 'volume_id': 'a203e79e-9126-47e8-96d7-9c0a57c68179', 'name': 'volume-a203e79e-9126-47e8-96d7-9c0a57c68179', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2', 'attached_at': '', 'detached_at': '', 'volume_id': 'a203e79e-9126-47e8-96d7-9c0a57c68179', 'serial': 'a203e79e-9126-47e8-96d7-9c0a57c68179'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1003.120636] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3499aa8b-cffa-4f4b-9b01-18b38283e2b1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.137990] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d85ca8-dc8d-4586-81f2-97b3d71fdcf9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.161128] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] volume-a203e79e-9126-47e8-96d7-9c0a57c68179/volume-a203e79e-9126-47e8-96d7-9c0a57c68179.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1003.161804] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90dc6822-4e4d-402b-b5d2-f4bc287a24e2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.186321] env[69992]: DEBUG oslo_vmware.api [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Waiting for the task: (returnval){ [ 1003.186321] env[69992]: value = "task-2897015" [ 1003.186321] env[69992]: _type = "Task" [ 1003.186321] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.198417] env[69992]: DEBUG oslo_vmware.api [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Task: {'id': task-2897015, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.386967] env[69992]: DEBUG oslo_concurrency.lockutils [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.795s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.387619] env[69992]: DEBUG nova.compute.manager [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1003.390272] env[69992]: DEBUG oslo_concurrency.lockutils [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.686s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.391667] env[69992]: INFO nova.compute.claims [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1003.482937] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Acquiring lock "refresh_cache-30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.483116] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Acquired lock "refresh_cache-30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1003.483276] env[69992]: DEBUG nova.network.neutron [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1003.496578] env[69992]: DEBUG nova.compute.manager [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1003.496718] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1003.500100] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-facb8870-d51b-4580-97d8-564666e18cbe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.508317] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1003.508684] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bdd1d9fc-7dee-4b0d-861a-bf27023335cf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.518074] env[69992]: DEBUG oslo_vmware.api [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Waiting for the task: (returnval){ [ 1003.518074] env[69992]: value = "task-2897016" [ 1003.518074] env[69992]: _type = "Task" [ 1003.518074] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.527866] env[69992]: DEBUG oslo_vmware.api [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Task: {'id': task-2897016, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.573957] env[69992]: DEBUG oslo_vmware.api [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897013, 'name': PowerOnVM_Task, 'duration_secs': 0.532242} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.574493] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1003.575095] env[69992]: INFO nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Took 7.84 seconds to spawn the instance on the hypervisor. 
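The wait_for_task / _poll_task entries that recur throughout this excerpt (CreateVM_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, RelocateVM_Task, DeleteDatastoreFile_Task) all record the same pattern: the driver submits a vCenter task, then repeatedly reads the task's state and progress until it reports success or error, logging "progress is N%" on each poll and "completed successfully" with a duration_secs once done. The sketch below is a minimal, self-contained illustration of that polling loop only; it is not the oslo.vmware implementation, and poll_task_info and TaskFailed are hypothetical stand-ins for whatever the real session object exposes.

    import time

    class TaskFailed(Exception):
        """Raised when the polled task reports an error state (hypothetical)."""

    def wait_for_task(poll_task_info, interval=0.5, timeout=300):
        """Poll a task until it succeeds, fails, or times out.

        poll_task_info is any callable returning a (state, progress, error)
        tuple, e.g. a thin wrapper around a vCenter TaskInfo lookup. This is
        a sketch of the polling pattern seen in the log, not oslo.vmware's API.
        """
        deadline = time.monotonic() + timeout
        while True:
            state, progress, error = poll_task_info()
            # Mirrors the per-poll "Task: {...} progress is N%" DEBUG lines.
            print(f"task progress is {progress}%")
            if state == "success":
                return
            if state == "error":
                raise TaskFailed(error or "task failed")
            if time.monotonic() > deadline:
                raise TaskFailed("timed out waiting for task")
            time.sleep(interval)

In the log above, each iteration of such a loop corresponds to one "progress is N%" line, and the final poll produces the "completed successfully" line carrying the measured duration_secs.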
[ 1003.575267] env[69992]: DEBUG nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1003.576336] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd9fbb2-63ab-4595-9892-99146b777553 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.697435] env[69992]: DEBUG oslo_vmware.api [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Task: {'id': task-2897015, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.898603] env[69992]: DEBUG nova.compute.utils [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1003.900117] env[69992]: DEBUG nova.compute.manager [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1003.900330] env[69992]: DEBUG nova.network.neutron [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1003.951515] env[69992]: DEBUG nova.policy [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '57d2ee1abedf4874bcb44b4076199da6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b8716c4b7324052a3472734c655655a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1004.030885] env[69992]: DEBUG oslo_vmware.api [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Task: {'id': task-2897016, 'name': PowerOffVM_Task, 'duration_secs': 0.410233} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.032234] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1004.032533] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1004.033020] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-44428432-43f8-4da5-9f44-cd3accd929c0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.044624] env[69992]: DEBUG nova.network.neutron [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1004.102215] env[69992]: INFO nova.compute.manager [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Took 56.34 seconds to build instance. [ 1004.110834] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1004.111024] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1004.111230] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Deleting the datastore file [datastore1] e0b5ad16-f631-444c-a189-167e34574316 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1004.111561] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54343ca2-18f9-47af-9eb6-fb2ee3329eca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.120563] env[69992]: DEBUG oslo_vmware.api [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Waiting for the task: (returnval){ [ 1004.120563] env[69992]: value = "task-2897018" [ 1004.120563] env[69992]: _type = "Task" [ 1004.120563] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.130580] env[69992]: DEBUG oslo_vmware.api [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Task: {'id': task-2897018, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.200077] env[69992]: DEBUG oslo_vmware.api [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Task: {'id': task-2897015, 'name': ReconfigVM_Task, 'duration_secs': 0.62822} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.200382] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Reconfigured VM instance instance-00000028 to attach disk [datastore2] volume-a203e79e-9126-47e8-96d7-9c0a57c68179/volume-a203e79e-9126-47e8-96d7-9c0a57c68179.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1004.206145] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-269e026d-d9d9-468f-8e03-f25caae71299 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.224814] env[69992]: DEBUG oslo_vmware.api [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Waiting for the task: (returnval){ [ 1004.224814] env[69992]: value = "task-2897019" [ 1004.224814] env[69992]: _type = "Task" [ 1004.224814] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.236834] env[69992]: DEBUG oslo_vmware.api [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Task: {'id': task-2897019, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.261403] env[69992]: DEBUG nova.network.neutron [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Successfully created port: 9d7cbf3a-bf72-47c9-a580-0464b06e8c89 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1004.326802] env[69992]: DEBUG nova.network.neutron [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Updating instance_info_cache with network_info: [{"id": "abab8d85-8633-4722-85d1-b21be464919d", "address": "fa:16:3e:65:03:5b", "network": {"id": "adeff25c-7ce7-4915-aa2a-82f338cf74ca", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-834451172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef82945e1f93479ea4a19fbe1855870b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabab8d85-86", "ovs_interfaceid": "abab8d85-8633-4722-85d1-b21be464919d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.405507] env[69992]: DEBUG nova.compute.manager [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1004.610022] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04328045-a2bc-46a7-a2cd-806d0cf8df9d tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.625s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.635381] env[69992]: DEBUG oslo_vmware.api [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Task: {'id': task-2897018, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.337484} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.638032] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1004.638151] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1004.638286] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1004.638456] env[69992]: INFO nova.compute.manager [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] [instance: e0b5ad16-f631-444c-a189-167e34574316] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1004.638696] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1004.639286] env[69992]: DEBUG nova.compute.manager [-] [instance: e0b5ad16-f631-444c-a189-167e34574316] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1004.639286] env[69992]: DEBUG nova.network.neutron [-] [instance: e0b5ad16-f631-444c-a189-167e34574316] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1004.743680] env[69992]: DEBUG oslo_vmware.api [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Task: {'id': task-2897019, 'name': ReconfigVM_Task, 'duration_secs': 0.161901} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.743680] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581892', 'volume_id': 'a203e79e-9126-47e8-96d7-9c0a57c68179', 'name': 'volume-a203e79e-9126-47e8-96d7-9c0a57c68179', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2', 'attached_at': '', 'detached_at': '', 'volume_id': 'a203e79e-9126-47e8-96d7-9c0a57c68179', 'serial': 'a203e79e-9126-47e8-96d7-9c0a57c68179'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1004.743871] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4bb77c31-138e-4108-855e-2cb02ce6332c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.753062] env[69992]: DEBUG oslo_vmware.api [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Waiting for the task: (returnval){ [ 1004.753062] env[69992]: value = "task-2897020" [ 1004.753062] env[69992]: _type = "Task" [ 1004.753062] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.761792] env[69992]: DEBUG oslo_vmware.api [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Task: {'id': task-2897020, 'name': Rename_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.831601] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Releasing lock "refresh_cache-30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1004.831900] env[69992]: DEBUG nova.compute.manager [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Instance network_info: |[{"id": "abab8d85-8633-4722-85d1-b21be464919d", "address": "fa:16:3e:65:03:5b", "network": {"id": "adeff25c-7ce7-4915-aa2a-82f338cf74ca", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-834451172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef82945e1f93479ea4a19fbe1855870b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabab8d85-86", "ovs_interfaceid": "abab8d85-8633-4722-85d1-b21be464919d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1004.832998] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:03:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'abab8d85-8633-4722-85d1-b21be464919d', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1004.839860] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Creating folder: Project (ef82945e1f93479ea4a19fbe1855870b). Parent ref: group-v581821. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1004.841233] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e74e787e-1e95-4db0-aed5-1105d2ab9477 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.860568] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Created folder: Project (ef82945e1f93479ea4a19fbe1855870b) in parent group-v581821. [ 1004.860824] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Creating folder: Instances. Parent ref: group-v581943. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1004.861983] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0e3345a-b766-4129-b7ce-b5e4d4398511 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.882320] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Created folder: Instances in parent group-v581943. [ 1004.882661] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1004.882916] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1004.883591] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-156dbd3a-7757-472e-8de2-9a62b85aeca5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.910406] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1004.910406] env[69992]: value = "task-2897023" [ 1004.910406] env[69992]: _type = "Task" [ 1004.910406] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.925784] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897023, 'name': CreateVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.067037] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261c3cf5-376f-4fd9-805d-8a7b15614e7f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.077942] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21a96b81-26ba-4450-ba31-c5fb56ba10f0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.110937] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e4f89c-d242-4c89-b699-42836681a430 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.114821] env[69992]: DEBUG nova.compute.manager [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1005.120516] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32430be4-2956-4f7b-877b-8fb44df141ee {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.136222] env[69992]: DEBUG nova.compute.provider_tree [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1005.264119] env[69992]: DEBUG oslo_vmware.api [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Task: {'id': task-2897020, 'name': Rename_Task, 'duration_secs': 0.170471} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.264412] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1005.264659] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5e3c129b-9ce9-40e7-954c-b8bb1508d652 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.272814] env[69992]: DEBUG oslo_vmware.api [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Waiting for the task: (returnval){ [ 1005.272814] env[69992]: value = "task-2897024" [ 1005.272814] env[69992]: _type = "Task" [ 1005.272814] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.281214] env[69992]: DEBUG oslo_vmware.api [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Task: {'id': task-2897024, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.349346] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "d361769c-bfc2-4c72-83f4-dc9b51f907a3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.352231] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "d361769c-bfc2-4c72-83f4-dc9b51f907a3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.352231] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "d361769c-bfc2-4c72-83f4-dc9b51f907a3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.352231] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "d361769c-bfc2-4c72-83f4-dc9b51f907a3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.352231] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "d361769c-bfc2-4c72-83f4-dc9b51f907a3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.354110] env[69992]: INFO nova.compute.manager [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Terminating instance [ 1005.421916] env[69992]: DEBUG nova.compute.manager [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1005.423939] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897023, 'name': CreateVM_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.448245] env[69992]: DEBUG nova.virt.hardware [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1005.448490] env[69992]: DEBUG nova.virt.hardware [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1005.448645] env[69992]: DEBUG nova.virt.hardware [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1005.448820] env[69992]: DEBUG nova.virt.hardware [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1005.448960] env[69992]: DEBUG nova.virt.hardware [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1005.449129] env[69992]: DEBUG nova.virt.hardware [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1005.449401] env[69992]: DEBUG nova.virt.hardware [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1005.449587] env[69992]: DEBUG nova.virt.hardware [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 
tempest-DeleteServersTestJSON-1168726256-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1005.450158] env[69992]: DEBUG nova.virt.hardware [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1005.450158] env[69992]: DEBUG nova.virt.hardware [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1005.450266] env[69992]: DEBUG nova.virt.hardware [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1005.451275] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-234d19c7-4e47-48a9-816a-645e94e53067 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.461361] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495d2b5f-a3a2-43dc-8964-1a91e9037df5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.486837] env[69992]: DEBUG nova.compute.manager [req-76037d65-54db-4e61-a4b5-45c4550a073d req-614b2dfe-759b-4a19-a252-6470ded224b6 service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Received event network-changed-abab8d85-8633-4722-85d1-b21be464919d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1005.487048] env[69992]: DEBUG nova.compute.manager [req-76037d65-54db-4e61-a4b5-45c4550a073d req-614b2dfe-759b-4a19-a252-6470ded224b6 service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Refreshing instance network info cache due to event network-changed-abab8d85-8633-4722-85d1-b21be464919d. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1005.487278] env[69992]: DEBUG oslo_concurrency.lockutils [req-76037d65-54db-4e61-a4b5-45c4550a073d req-614b2dfe-759b-4a19-a252-6470ded224b6 service nova] Acquiring lock "refresh_cache-30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.487424] env[69992]: DEBUG oslo_concurrency.lockutils [req-76037d65-54db-4e61-a4b5-45c4550a073d req-614b2dfe-759b-4a19-a252-6470ded224b6 service nova] Acquired lock "refresh_cache-30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.487600] env[69992]: DEBUG nova.network.neutron [req-76037d65-54db-4e61-a4b5-45c4550a073d req-614b2dfe-759b-4a19-a252-6470ded224b6 service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Refreshing network info cache for port abab8d85-8633-4722-85d1-b21be464919d {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1005.634974] env[69992]: DEBUG nova.compute.manager [req-3e9a69aa-bffa-44a7-9be6-f2a894422627 req-4e7a89ad-7927-4ec7-81d3-789716d83845 service nova] [instance: e0b5ad16-f631-444c-a189-167e34574316] Received event network-vif-deleted-cc179b5e-5d8b-49eb-99ea-6adcb9e0af27 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1005.635713] env[69992]: INFO nova.compute.manager [req-3e9a69aa-bffa-44a7-9be6-f2a894422627 req-4e7a89ad-7927-4ec7-81d3-789716d83845 service nova] [instance: e0b5ad16-f631-444c-a189-167e34574316] Neutron deleted interface cc179b5e-5d8b-49eb-99ea-6adcb9e0af27; detaching it from the instance and deleting it from the info cache [ 1005.636595] env[69992]: DEBUG nova.network.neutron [req-3e9a69aa-bffa-44a7-9be6-f2a894422627 req-4e7a89ad-7927-4ec7-81d3-789716d83845 service nova] [instance: e0b5ad16-f631-444c-a189-167e34574316] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.640962] env[69992]: DEBUG nova.scheduler.client.report [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1005.645862] env[69992]: DEBUG oslo_concurrency.lockutils [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.787203] env[69992]: DEBUG oslo_vmware.api [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Task: {'id': task-2897024, 'name': PowerOnVM_Task, 
'duration_secs': 0.463501} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.787806] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1005.788046] env[69992]: INFO nova.compute.manager [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Took 5.48 seconds to spawn the instance on the hypervisor. [ 1005.788250] env[69992]: DEBUG nova.compute.manager [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1005.789086] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-932d7baf-f451-4fc9-bbfc-508b8f6ed8c7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.809412] env[69992]: DEBUG nova.network.neutron [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Successfully updated port: 9d7cbf3a-bf72-47c9-a580-0464b06e8c89 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1005.863067] env[69992]: DEBUG nova.compute.manager [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1005.863197] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1005.865840] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c54ebae-b93e-4128-8f19-f27c7b0906f5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.867972] env[69992]: DEBUG nova.network.neutron [-] [instance: e0b5ad16-f631-444c-a189-167e34574316] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.877342] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1005.878320] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f4ac77ad-2593-4919-8a32-4cfbedadcc03 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.888645] env[69992]: DEBUG oslo_vmware.api [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 1005.888645] env[69992]: value = "task-2897025" [ 1005.888645] env[69992]: _type = "Task" [ 1005.888645] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.898554] env[69992]: DEBUG oslo_vmware.api [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897025, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.924549] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897023, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.983057] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.983057] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.983265] env[69992]: INFO nova.compute.manager [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Shelving [ 1006.140319] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-217d619c-e116-4919-b2d8-ba9f5d80b685 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.148534] env[69992]: DEBUG oslo_concurrency.lockutils [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.758s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.149043] env[69992]: DEBUG nova.compute.manager [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1006.152565] env[69992]: DEBUG oslo_concurrency.lockutils [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.660s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.152565] env[69992]: DEBUG oslo_concurrency.lockutils [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.154310] env[69992]: DEBUG oslo_concurrency.lockutils [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.035s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.156080] env[69992]: INFO nova.compute.claims [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1006.164173] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78a99023-8479-4d40-b0e4-ca786e2b34a8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.220542] env[69992]: DEBUG nova.compute.manager [req-3e9a69aa-bffa-44a7-9be6-f2a894422627 req-4e7a89ad-7927-4ec7-81d3-789716d83845 service nova] [instance: e0b5ad16-f631-444c-a189-167e34574316] Detach interface failed, port_id=cc179b5e-5d8b-49eb-99ea-6adcb9e0af27, reason: Instance e0b5ad16-f631-444c-a189-167e34574316 could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1006.225329] env[69992]: INFO nova.scheduler.client.report [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Deleted allocations for instance 94a4a16e-926c-47ce-a5a7-0b216b7c5442 [ 1006.262600] env[69992]: DEBUG nova.network.neutron [req-76037d65-54db-4e61-a4b5-45c4550a073d req-614b2dfe-759b-4a19-a252-6470ded224b6 service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Updated VIF entry in instance network info cache for port abab8d85-8633-4722-85d1-b21be464919d. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1006.263170] env[69992]: DEBUG nova.network.neutron [req-76037d65-54db-4e61-a4b5-45c4550a073d req-614b2dfe-759b-4a19-a252-6470ded224b6 service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Updating instance_info_cache with network_info: [{"id": "abab8d85-8633-4722-85d1-b21be464919d", "address": "fa:16:3e:65:03:5b", "network": {"id": "adeff25c-7ce7-4915-aa2a-82f338cf74ca", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-834451172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef82945e1f93479ea4a19fbe1855870b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabab8d85-86", "ovs_interfaceid": "abab8d85-8633-4722-85d1-b21be464919d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.307609] env[69992]: INFO nova.compute.manager [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Took 53.01 seconds to build instance. [ 1006.312145] env[69992]: DEBUG oslo_concurrency.lockutils [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "refresh_cache-62936d27-5405-4d29-b3ff-c4d8a74ba440" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.312292] env[69992]: DEBUG oslo_concurrency.lockutils [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired lock "refresh_cache-62936d27-5405-4d29-b3ff-c4d8a74ba440" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1006.312441] env[69992]: DEBUG nova.network.neutron [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1006.370701] env[69992]: INFO nova.compute.manager [-] [instance: e0b5ad16-f631-444c-a189-167e34574316] Took 1.73 seconds to deallocate network for instance. [ 1006.399769] env[69992]: DEBUG oslo_vmware.api [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897025, 'name': PowerOffVM_Task, 'duration_secs': 0.285433} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.400026] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1006.400229] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1006.400485] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd36189e-fc1a-4fa2-9366-9f38a3afbe54 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.422176] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897023, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.497566] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1006.497796] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1006.497975] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Deleting the datastore file [datastore2] d361769c-bfc2-4c72-83f4-dc9b51f907a3 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1006.498826] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe5ec42e-d281-4671-a533-92ef211aa835 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.506735] env[69992]: DEBUG oslo_vmware.api [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 1006.506735] env[69992]: value = "task-2897027" [ 1006.506735] env[69992]: _type = "Task" [ 1006.506735] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.515606] env[69992]: DEBUG oslo_vmware.api [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897027, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.678839] env[69992]: DEBUG nova.compute.utils [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1006.682852] env[69992]: DEBUG nova.compute.manager [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1006.682983] env[69992]: DEBUG nova.network.neutron [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1006.722921] env[69992]: DEBUG nova.policy [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dc6792edfe6245d2ba77a14aba041ca0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '658cab8ee4194f7f98dd07de450f248b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1006.733760] env[69992]: DEBUG oslo_concurrency.lockutils [None req-38c08824-d61d-402f-872e-2209c64205ed tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "94a4a16e-926c-47ce-a5a7-0b216b7c5442" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.772s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.766309] env[69992]: DEBUG oslo_concurrency.lockutils [req-76037d65-54db-4e61-a4b5-45c4550a073d req-614b2dfe-759b-4a19-a252-6470ded224b6 service nova] Releasing lock "refresh_cache-30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1006.810379] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71cd9e9f-4ba4-4c59-939b-6fb91ee01310 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Lock "fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.619s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.882242] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.883690] env[69992]: DEBUG 
nova.network.neutron [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1006.923272] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897023, 'name': CreateVM_Task, 'duration_secs': 1.944191} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.923452] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1006.924170] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.924337] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1006.924641] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1006.924901] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12036065-d472-4240-ac66-050a49fe5094 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.931796] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Waiting for the task: (returnval){ [ 1006.931796] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52e5778c-06f2-8093-6608-78a56dbe7cab" [ 1006.931796] env[69992]: _type = "Task" [ 1006.931796] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.940654] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e5778c-06f2-8093-6608-78a56dbe7cab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.994420] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1006.994420] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c0b255a8-c401-44c2-b7a9-a2c05b6a8a5d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.006204] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1007.006204] env[69992]: value = "task-2897028" [ 1007.006204] env[69992]: _type = "Task" [ 1007.006204] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.020767] env[69992]: DEBUG oslo_vmware.api [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897027, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.449754} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.028717] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1007.028717] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1007.028717] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1007.028717] env[69992]: INFO nova.compute.manager [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1007.028717] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1007.028908] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897028, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.028908] env[69992]: DEBUG nova.compute.manager [-] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1007.028908] env[69992]: DEBUG nova.network.neutron [-] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1007.067640] env[69992]: DEBUG nova.network.neutron [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Successfully created port: a50c02e4-9a37-4f83-8d66-8afea64e2bc5 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1007.187024] env[69992]: DEBUG nova.compute.manager [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1007.276842] env[69992]: DEBUG nova.network.neutron [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Updating instance_info_cache with network_info: [{"id": "9d7cbf3a-bf72-47c9-a580-0464b06e8c89", "address": "fa:16:3e:3c:c2:07", "network": {"id": "ef950ccf-7246-4fba-b1d2-5829e51d7f7d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093076994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b8716c4b7324052a3472734c655655a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d7cbf3a-bf", "ovs_interfaceid": "9d7cbf3a-bf72-47c9-a580-0464b06e8c89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.314368] env[69992]: DEBUG nova.compute.manager [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1007.449852] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e5778c-06f2-8093-6608-78a56dbe7cab, 'name': SearchDatastore_Task, 'duration_secs': 0.019199} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.449852] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1007.449852] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1007.449852] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.450430] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1007.450430] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1007.450430] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5fa45c70-be21-4aa4-8832-26e43194732d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.466302] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1007.466302] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1007.466302] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d389db6-eb1a-4b59-bca2-b30aefc52ba9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.472709] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Waiting for the task: (returnval){ [ 1007.472709] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52267fd4-5431-d369-ba77-b66a494d782e" [ 1007.472709] env[69992]: _type = "Task" [ 1007.472709] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.486679] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52267fd4-5431-d369-ba77-b66a494d782e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.519101] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897028, 'name': PowerOffVM_Task, 'duration_secs': 0.45165} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.519101] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1007.519567] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba173f4-b7a4-4e02-be37-6513ce53eeb3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.542327] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f26baa-39a8-4241-9ab9-e2ad6ab23d57 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.657396] env[69992]: DEBUG nova.compute.manager [req-5bae9c78-3e3f-463e-ab47-ec677f1f0e8b req-3899c77d-e0cb-4cd0-aa57-f7bd84e21ad9 service nova] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Received event network-vif-plugged-9d7cbf3a-bf72-47c9-a580-0464b06e8c89 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1007.657640] env[69992]: DEBUG oslo_concurrency.lockutils [req-5bae9c78-3e3f-463e-ab47-ec677f1f0e8b req-3899c77d-e0cb-4cd0-aa57-f7bd84e21ad9 service nova] Acquiring lock "62936d27-5405-4d29-b3ff-c4d8a74ba440-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.657851] env[69992]: DEBUG oslo_concurrency.lockutils 
[req-5bae9c78-3e3f-463e-ab47-ec677f1f0e8b req-3899c77d-e0cb-4cd0-aa57-f7bd84e21ad9 service nova] Lock "62936d27-5405-4d29-b3ff-c4d8a74ba440-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.658028] env[69992]: DEBUG oslo_concurrency.lockutils [req-5bae9c78-3e3f-463e-ab47-ec677f1f0e8b req-3899c77d-e0cb-4cd0-aa57-f7bd84e21ad9 service nova] Lock "62936d27-5405-4d29-b3ff-c4d8a74ba440-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.658198] env[69992]: DEBUG nova.compute.manager [req-5bae9c78-3e3f-463e-ab47-ec677f1f0e8b req-3899c77d-e0cb-4cd0-aa57-f7bd84e21ad9 service nova] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] No waiting events found dispatching network-vif-plugged-9d7cbf3a-bf72-47c9-a580-0464b06e8c89 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1007.658362] env[69992]: WARNING nova.compute.manager [req-5bae9c78-3e3f-463e-ab47-ec677f1f0e8b req-3899c77d-e0cb-4cd0-aa57-f7bd84e21ad9 service nova] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Received unexpected event network-vif-plugged-9d7cbf3a-bf72-47c9-a580-0464b06e8c89 for instance with vm_state building and task_state spawning. [ 1007.658517] env[69992]: DEBUG nova.compute.manager [req-5bae9c78-3e3f-463e-ab47-ec677f1f0e8b req-3899c77d-e0cb-4cd0-aa57-f7bd84e21ad9 service nova] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Received event network-changed-9d7cbf3a-bf72-47c9-a580-0464b06e8c89 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1007.658672] env[69992]: DEBUG nova.compute.manager [req-5bae9c78-3e3f-463e-ab47-ec677f1f0e8b req-3899c77d-e0cb-4cd0-aa57-f7bd84e21ad9 service nova] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Refreshing instance network info cache due to event network-changed-9d7cbf3a-bf72-47c9-a580-0464b06e8c89. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1007.658831] env[69992]: DEBUG oslo_concurrency.lockutils [req-5bae9c78-3e3f-463e-ab47-ec677f1f0e8b req-3899c77d-e0cb-4cd0-aa57-f7bd84e21ad9 service nova] Acquiring lock "refresh_cache-62936d27-5405-4d29-b3ff-c4d8a74ba440" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.779666] env[69992]: DEBUG oslo_concurrency.lockutils [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Releasing lock "refresh_cache-62936d27-5405-4d29-b3ff-c4d8a74ba440" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1007.780054] env[69992]: DEBUG nova.compute.manager [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Instance network_info: |[{"id": "9d7cbf3a-bf72-47c9-a580-0464b06e8c89", "address": "fa:16:3e:3c:c2:07", "network": {"id": "ef950ccf-7246-4fba-b1d2-5829e51d7f7d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093076994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b8716c4b7324052a3472734c655655a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d7cbf3a-bf", "ovs_interfaceid": "9d7cbf3a-bf72-47c9-a580-0464b06e8c89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1007.782824] env[69992]: DEBUG oslo_concurrency.lockutils [req-5bae9c78-3e3f-463e-ab47-ec677f1f0e8b req-3899c77d-e0cb-4cd0-aa57-f7bd84e21ad9 service nova] Acquired lock "refresh_cache-62936d27-5405-4d29-b3ff-c4d8a74ba440" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1007.783017] env[69992]: DEBUG nova.network.neutron [req-5bae9c78-3e3f-463e-ab47-ec677f1f0e8b req-3899c77d-e0cb-4cd0-aa57-f7bd84e21ad9 service nova] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Refreshing network info cache for port 9d7cbf3a-bf72-47c9-a580-0464b06e8c89 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1007.784260] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:c2:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ed4797-90ad-44cd-bbcb-e90b2a8400f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'9d7cbf3a-bf72-47c9-a580-0464b06e8c89', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1007.792120] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Creating folder: Project (5b8716c4b7324052a3472734c655655a). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1007.796091] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b6d8d08-25c2-4e74-9e17-9c2fcb01fc08 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.799306] env[69992]: DEBUG nova.compute.manager [req-86808b67-0c6c-49bd-a26c-3c71add93809 req-1a42905c-4e58-4ea5-a046-39852e34b974 service nova] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Received event network-changed-bf5a36db-df8e-4dd4-9248-fdb5f256bc7b {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1007.799480] env[69992]: DEBUG nova.compute.manager [req-86808b67-0c6c-49bd-a26c-3c71add93809 req-1a42905c-4e58-4ea5-a046-39852e34b974 service nova] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Refreshing instance network info cache due to event network-changed-bf5a36db-df8e-4dd4-9248-fdb5f256bc7b. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1007.799832] env[69992]: DEBUG oslo_concurrency.lockutils [req-86808b67-0c6c-49bd-a26c-3c71add93809 req-1a42905c-4e58-4ea5-a046-39852e34b974 service nova] Acquiring lock "refresh_cache-fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.799966] env[69992]: DEBUG oslo_concurrency.lockutils [req-86808b67-0c6c-49bd-a26c-3c71add93809 req-1a42905c-4e58-4ea5-a046-39852e34b974 service nova] Acquired lock "refresh_cache-fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1007.800650] env[69992]: DEBUG nova.network.neutron [req-86808b67-0c6c-49bd-a26c-3c71add93809 req-1a42905c-4e58-4ea5-a046-39852e34b974 service nova] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Refreshing network info cache for port bf5a36db-df8e-4dd4-9248-fdb5f256bc7b {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1007.812998] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Created folder: Project (5b8716c4b7324052a3472734c655655a) in parent group-v581821. [ 1007.813745] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Creating folder: Instances. Parent ref: group-v581946. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1007.813745] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cea4e705-1701-419e-89f3-aacb211a4046 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.827303] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Created folder: Instances in parent group-v581946. [ 1007.827539] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1007.827907] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1007.828132] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-42dc7647-9f16-4ceb-a6e2-4250bb0c4f55 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.847262] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.856647] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1007.856647] env[69992]: value = "task-2897031" [ 1007.856647] env[69992]: _type = "Task" [ 1007.856647] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.869480] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897031, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.890787] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ee5fd9-1219-487e-b63f-11d1e541ff19 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.898953] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d93f48-e472-45c0-9f1a-334b87fe78d8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.933937] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff7f51e-a137-420b-95b0-a7d8afd72f41 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.943647] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d53a94c-92bb-46a3-b57b-d52fc3615401 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.958270] env[69992]: DEBUG nova.compute.provider_tree [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1007.984144] env[69992]: DEBUG nova.network.neutron [-] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.985464] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52267fd4-5431-d369-ba77-b66a494d782e, 'name': SearchDatastore_Task, 'duration_secs': 0.019059} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.986686] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d43764b-44a4-49bb-9e40-bbca2395be6f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.993991] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Waiting for the task: (returnval){ [ 1007.993991] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52fe70cf-753a-6631-6ee9-472a7fa3a321" [ 1007.993991] env[69992]: _type = "Task" [ 1007.993991] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.009531] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52fe70cf-753a-6631-6ee9-472a7fa3a321, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.058229] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Creating Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1008.058575] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7e672752-c90f-420e-8daf-1704cbf65871 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.069451] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1008.069451] env[69992]: value = "task-2897032" [ 1008.069451] env[69992]: _type = "Task" [ 1008.069451] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.079669] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897032, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.090831] env[69992]: DEBUG nova.network.neutron [req-5bae9c78-3e3f-463e-ab47-ec677f1f0e8b req-3899c77d-e0cb-4cd0-aa57-f7bd84e21ad9 service nova] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Updated VIF entry in instance network info cache for port 9d7cbf3a-bf72-47c9-a580-0464b06e8c89. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1008.091400] env[69992]: DEBUG nova.network.neutron [req-5bae9c78-3e3f-463e-ab47-ec677f1f0e8b req-3899c77d-e0cb-4cd0-aa57-f7bd84e21ad9 service nova] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Updating instance_info_cache with network_info: [{"id": "9d7cbf3a-bf72-47c9-a580-0464b06e8c89", "address": "fa:16:3e:3c:c2:07", "network": {"id": "ef950ccf-7246-4fba-b1d2-5829e51d7f7d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093076994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b8716c4b7324052a3472734c655655a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d7cbf3a-bf", "ovs_interfaceid": "9d7cbf3a-bf72-47c9-a580-0464b06e8c89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.118906] env[69992]: DEBUG oslo_concurrency.lockutils [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "7fbab19d-5a0a-4da3-b078-40ca0eaf8c97" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1008.119298] env[69992]: DEBUG oslo_concurrency.lockutils [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "7fbab19d-5a0a-4da3-b078-40ca0eaf8c97" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.119603] env[69992]: DEBUG oslo_concurrency.lockutils [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "7fbab19d-5a0a-4da3-b078-40ca0eaf8c97-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1008.119887] env[69992]: DEBUG oslo_concurrency.lockutils [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "7fbab19d-5a0a-4da3-b078-40ca0eaf8c97-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.120132] env[69992]: DEBUG oslo_concurrency.lockutils [None 
req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "7fbab19d-5a0a-4da3-b078-40ca0eaf8c97-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.127266] env[69992]: INFO nova.compute.manager [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Terminating instance [ 1008.199715] env[69992]: DEBUG nova.compute.manager [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1008.233562] env[69992]: DEBUG nova.virt.hardware [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1008.233817] env[69992]: DEBUG nova.virt.hardware [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1008.234130] env[69992]: DEBUG nova.virt.hardware [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1008.234349] env[69992]: DEBUG nova.virt.hardware [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1008.234509] env[69992]: DEBUG nova.virt.hardware [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1008.234674] env[69992]: DEBUG nova.virt.hardware [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1008.235044] env[69992]: DEBUG nova.virt.hardware [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1008.235109] env[69992]: DEBUG nova.virt.hardware [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1008.235291] env[69992]: DEBUG nova.virt.hardware [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1008.235466] env[69992]: DEBUG nova.virt.hardware [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1008.235746] env[69992]: DEBUG nova.virt.hardware [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1008.237181] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be0838c-9064-46b5-8cb2-a2d3daa834de {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.247286] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4dd383d-8785-4164-b935-2efb12f7eb34 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.371303] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897031, 'name': CreateVM_Task, 'duration_secs': 0.418726} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.371720] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1008.372705] env[69992]: DEBUG oslo_concurrency.lockutils [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.373044] env[69992]: DEBUG oslo_concurrency.lockutils [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1008.374165] env[69992]: DEBUG oslo_concurrency.lockutils [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1008.374165] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eddd786e-9743-4e71-8a66-596f33cd4d07 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.383230] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1008.383230] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52f48473-64af-bb2b-06e4-56f05c08bf1a" [ 1008.383230] env[69992]: _type = "Task" [ 1008.383230] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.393507] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52f48473-64af-bb2b-06e4-56f05c08bf1a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.464705] env[69992]: DEBUG nova.scheduler.client.report [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1008.487201] env[69992]: INFO nova.compute.manager [-] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Took 1.46 seconds to deallocate network for instance. [ 1008.506173] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52fe70cf-753a-6631-6ee9-472a7fa3a321, 'name': SearchDatastore_Task, 'duration_secs': 0.015189} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.506445] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.506733] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6/30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1008.506967] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b4b12549-e631-4120-8fd8-4de207eeb126 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.516540] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Waiting for the task: (returnval){ [ 1008.516540] env[69992]: value = "task-2897033" [ 1008.516540] env[69992]: _type = "Task" [ 1008.516540] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.530638] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': task-2897033, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.575990] env[69992]: DEBUG nova.network.neutron [req-86808b67-0c6c-49bd-a26c-3c71add93809 req-1a42905c-4e58-4ea5-a046-39852e34b974 service nova] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Updated VIF entry in instance network info cache for port bf5a36db-df8e-4dd4-9248-fdb5f256bc7b. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1008.576436] env[69992]: DEBUG nova.network.neutron [req-86808b67-0c6c-49bd-a26c-3c71add93809 req-1a42905c-4e58-4ea5-a046-39852e34b974 service nova] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Updating instance_info_cache with network_info: [{"id": "bf5a36db-df8e-4dd4-9248-fdb5f256bc7b", "address": "fa:16:3e:23:07:fd", "network": {"id": "3823045c-df09-4c89-af5a-8eb2f8178ac8", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1085695135-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b7bf7c044664b77aee07f763794eb67", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf5a36db-df", "ovs_interfaceid": "bf5a36db-df8e-4dd4-9248-fdb5f256bc7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.586305] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897032, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.594336] env[69992]: DEBUG oslo_concurrency.lockutils [req-5bae9c78-3e3f-463e-ab47-ec677f1f0e8b req-3899c77d-e0cb-4cd0-aa57-f7bd84e21ad9 service nova] Releasing lock "refresh_cache-62936d27-5405-4d29-b3ff-c4d8a74ba440" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.595134] env[69992]: DEBUG nova.compute.manager [req-5bae9c78-3e3f-463e-ab47-ec677f1f0e8b req-3899c77d-e0cb-4cd0-aa57-f7bd84e21ad9 service nova] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Received event network-vif-deleted-c3a06e04-2fae-4c1e-bece-fd85d6e74f50 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1008.595276] env[69992]: INFO nova.compute.manager [req-5bae9c78-3e3f-463e-ab47-ec677f1f0e8b req-3899c77d-e0cb-4cd0-aa57-f7bd84e21ad9 service nova] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Neutron deleted interface c3a06e04-2fae-4c1e-bece-fd85d6e74f50; detaching it from the instance and deleting it from the info cache [ 1008.595389] env[69992]: DEBUG nova.network.neutron [req-5bae9c78-3e3f-463e-ab47-ec677f1f0e8b req-3899c77d-e0cb-4cd0-aa57-f7bd84e21ad9 service nova] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.632323] env[69992]: DEBUG nova.compute.manager [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1008.632638] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1008.633904] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-391a1551-c0ab-4a9f-95d6-dad8a0ff8d3a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.643166] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1008.643443] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7c081a75-a868-46b7-b490-fc8f3e8a3f5f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.650453] env[69992]: DEBUG oslo_vmware.api [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 1008.650453] env[69992]: value = "task-2897034" [ 1008.650453] env[69992]: _type = "Task" [ 1008.650453] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.660995] env[69992]: DEBUG oslo_vmware.api [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2897034, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.805226] env[69992]: DEBUG nova.network.neutron [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Successfully updated port: a50c02e4-9a37-4f83-8d66-8afea64e2bc5 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1008.897143] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52f48473-64af-bb2b-06e4-56f05c08bf1a, 'name': SearchDatastore_Task, 'duration_secs': 0.012595} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.897470] env[69992]: DEBUG oslo_concurrency.lockutils [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.897716] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1008.897964] env[69992]: DEBUG oslo_concurrency.lockutils [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.898128] env[69992]: DEBUG oslo_concurrency.lockutils [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1008.898318] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1008.898713] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a02c6851-34ce-4953-bedf-851cc0ceedc7 {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.916650] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1008.916948] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1008.917982] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f640adca-8621-49ad-a1c2-46110528bb30 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.932956] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1008.932956] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52c43633-7243-e7a3-4dd7-2dbafb63ed0b" [ 1008.932956] env[69992]: _type = "Task" [ 1008.932956] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.943111] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c43633-7243-e7a3-4dd7-2dbafb63ed0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.972123] env[69992]: DEBUG oslo_concurrency.lockutils [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.818s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.972763] env[69992]: DEBUG nova.compute.manager [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1008.975942] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.617s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.977574] env[69992]: INFO nova.compute.claims [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1008.995187] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.030584] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': task-2897033, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.080526] env[69992]: DEBUG oslo_concurrency.lockutils [req-86808b67-0c6c-49bd-a26c-3c71add93809 req-1a42905c-4e58-4ea5-a046-39852e34b974 service nova] Releasing lock "refresh_cache-fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1009.081344] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897032, 'name': CreateSnapshot_Task, 'duration_secs': 0.566352} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.081344] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Created Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1009.082145] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa1e45f-b16f-4a55-9103-769ad8595c51 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.098950] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b18e96b0-52a7-4a7a-b418-3efbf50bdc1c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.110473] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb791d5b-e664-42e8-ab55-bd5d2bb78927 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.148261] env[69992]: DEBUG nova.compute.manager [req-5bae9c78-3e3f-463e-ab47-ec677f1f0e8b req-3899c77d-e0cb-4cd0-aa57-f7bd84e21ad9 service nova] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Detach interface failed, port_id=c3a06e04-2fae-4c1e-bece-fd85d6e74f50, reason: Instance d361769c-bfc2-4c72-83f4-dc9b51f907a3 could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1009.162186] env[69992]: DEBUG oslo_vmware.api [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2897034, 'name': PowerOffVM_Task, 'duration_secs': 0.273289} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.162575] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1009.162760] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1009.163021] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-77a00361-568d-49b4-8b09-3fba5b207acc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.260704] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1009.260923] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1009.261115] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Deleting the datastore file [datastore2] 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1009.261831] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7654c2c-9de3-40a5-ad42-e008b88e1a9b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.271323] env[69992]: DEBUG oslo_vmware.api [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 1009.271323] env[69992]: value = "task-2897036" [ 1009.271323] env[69992]: _type = "Task" [ 1009.271323] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.279732] env[69992]: DEBUG oslo_vmware.api [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2897036, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.307499] env[69992]: DEBUG oslo_concurrency.lockutils [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "refresh_cache-2b1a0943-d59a-441d-a2e6-8149106803b6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.308758] env[69992]: DEBUG oslo_concurrency.lockutils [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquired lock "refresh_cache-2b1a0943-d59a-441d-a2e6-8149106803b6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1009.308758] env[69992]: DEBUG nova.network.neutron [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1009.447307] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c43633-7243-e7a3-4dd7-2dbafb63ed0b, 'name': SearchDatastore_Task, 'duration_secs': 0.059251} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.447887] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5da9f8b5-435d-4fad-9a4b-84270c88df1e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.454705] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1009.454705] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a819f2-b564-0bc3-4db7-9313fa10ff72" [ 1009.454705] env[69992]: _type = "Task" [ 1009.454705] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.464660] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a819f2-b564-0bc3-4db7-9313fa10ff72, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.482367] env[69992]: DEBUG nova.compute.utils [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1009.485672] env[69992]: DEBUG nova.compute.manager [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1009.485850] env[69992]: DEBUG nova.network.neutron [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1009.529190] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': task-2897033, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.596829} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.529457] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6/30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1009.529678] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1009.529918] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3511190e-5107-4245-965d-9c385baa897f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.537771] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Waiting for the task: (returnval){ [ 1009.537771] env[69992]: value = "task-2897037" [ 1009.537771] env[69992]: _type = "Task" [ 1009.537771] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.542623] env[69992]: DEBUG nova.policy [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '044902c6075d41739188628ba5ebd58d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b51b8195c4e7418cbdaa66aa5e5aff5b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1009.550949] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': task-2897037, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.599809] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Creating linked-clone VM from snapshot {{(pid=69992) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1009.600190] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-541f7fc3-d1c2-4210-b360-25f0ae026166 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.610682] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1009.610682] env[69992]: value = "task-2897038" [ 1009.610682] env[69992]: _type = "Task" [ 1009.610682] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.619873] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897038, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.683211] env[69992]: DEBUG nova.compute.manager [req-c1e62929-0b92-4d14-8f63-90a6a7e89c70 req-5127efc6-c10a-4959-a583-88512bef4558 service nova] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Received event network-vif-plugged-a50c02e4-9a37-4f83-8d66-8afea64e2bc5 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1009.683471] env[69992]: DEBUG oslo_concurrency.lockutils [req-c1e62929-0b92-4d14-8f63-90a6a7e89c70 req-5127efc6-c10a-4959-a583-88512bef4558 service nova] Acquiring lock "2b1a0943-d59a-441d-a2e6-8149106803b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.683726] env[69992]: DEBUG oslo_concurrency.lockutils [req-c1e62929-0b92-4d14-8f63-90a6a7e89c70 req-5127efc6-c10a-4959-a583-88512bef4558 service nova] Lock "2b1a0943-d59a-441d-a2e6-8149106803b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.683951] env[69992]: DEBUG oslo_concurrency.lockutils [req-c1e62929-0b92-4d14-8f63-90a6a7e89c70 req-5127efc6-c10a-4959-a583-88512bef4558 service nova] Lock "2b1a0943-d59a-441d-a2e6-8149106803b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.684160] env[69992]: DEBUG nova.compute.manager [req-c1e62929-0b92-4d14-8f63-90a6a7e89c70 req-5127efc6-c10a-4959-a583-88512bef4558 service nova] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] No waiting events found dispatching network-vif-plugged-a50c02e4-9a37-4f83-8d66-8afea64e2bc5 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1009.684359] env[69992]: WARNING nova.compute.manager [req-c1e62929-0b92-4d14-8f63-90a6a7e89c70 req-5127efc6-c10a-4959-a583-88512bef4558 service nova] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Received unexpected event network-vif-plugged-a50c02e4-9a37-4f83-8d66-8afea64e2bc5 for instance with vm_state building and task_state spawning. [ 1009.684529] env[69992]: DEBUG nova.compute.manager [req-c1e62929-0b92-4d14-8f63-90a6a7e89c70 req-5127efc6-c10a-4959-a583-88512bef4558 service nova] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Received event network-changed-a50c02e4-9a37-4f83-8d66-8afea64e2bc5 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1009.684688] env[69992]: DEBUG nova.compute.manager [req-c1e62929-0b92-4d14-8f63-90a6a7e89c70 req-5127efc6-c10a-4959-a583-88512bef4558 service nova] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Refreshing instance network info cache due to event network-changed-a50c02e4-9a37-4f83-8d66-8afea64e2bc5. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1009.684861] env[69992]: DEBUG oslo_concurrency.lockutils [req-c1e62929-0b92-4d14-8f63-90a6a7e89c70 req-5127efc6-c10a-4959-a583-88512bef4558 service nova] Acquiring lock "refresh_cache-2b1a0943-d59a-441d-a2e6-8149106803b6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.784500] env[69992]: DEBUG oslo_vmware.api [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2897036, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.823484] env[69992]: DEBUG nova.network.neutron [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Successfully created port: a8930976-0d99-4add-b5de-4f68e2761d75 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1009.859643] env[69992]: DEBUG nova.network.neutron [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1009.967720] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a819f2-b564-0bc3-4db7-9313fa10ff72, 'name': SearchDatastore_Task, 'duration_secs': 0.051901} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.972095] env[69992]: DEBUG oslo_concurrency.lockutils [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1009.972377] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 62936d27-5405-4d29-b3ff-c4d8a74ba440/62936d27-5405-4d29-b3ff-c4d8a74ba440.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1009.972805] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9f1ab4e-ef91-4441-a0fe-aedb5872a455 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.982709] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1009.982709] env[69992]: value = "task-2897039" [ 1009.982709] env[69992]: _type = "Task" [ 1009.982709] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.994218] env[69992]: DEBUG nova.compute.manager [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1009.997054] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897039, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.054803] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': task-2897037, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.226535} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.058602] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1010.061132] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f028ff99-097b-4fce-b5ef-2b89daa4012a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.092337] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6/30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1010.095974] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9e13cce-3855-4e95-bd29-6d2f59f02ca5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.113264] env[69992]: DEBUG nova.network.neutron [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Updating instance_info_cache with network_info: [{"id": "a50c02e4-9a37-4f83-8d66-8afea64e2bc5", "address": "fa:16:3e:86:90:b6", "network": {"id": "bea180e9-720e-4be5-bb1d-8aa1243cfe3f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-67313604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "658cab8ee4194f7f98dd07de450f248b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa50c02e4-9a", "ovs_interfaceid": "a50c02e4-9a37-4f83-8d66-8afea64e2bc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.128545] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897038, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.129948] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Waiting for the task: (returnval){ [ 1010.129948] env[69992]: value = "task-2897040" [ 1010.129948] env[69992]: _type = "Task" [ 1010.129948] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.282908] env[69992]: DEBUG oslo_vmware.api [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2897036, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.773732} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.283251] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1010.283392] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1010.283591] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1010.283725] env[69992]: INFO nova.compute.manager [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1010.283977] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1010.286925] env[69992]: DEBUG nova.compute.manager [-] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1010.287053] env[69992]: DEBUG nova.network.neutron [-] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1010.504539] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897039, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.618321] env[69992]: DEBUG oslo_concurrency.lockutils [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Releasing lock "refresh_cache-2b1a0943-d59a-441d-a2e6-8149106803b6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1010.618629] env[69992]: DEBUG nova.compute.manager [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Instance network_info: |[{"id": "a50c02e4-9a37-4f83-8d66-8afea64e2bc5", "address": "fa:16:3e:86:90:b6", "network": {"id": "bea180e9-720e-4be5-bb1d-8aa1243cfe3f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-67313604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "658cab8ee4194f7f98dd07de450f248b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa50c02e4-9a", "ovs_interfaceid": "a50c02e4-9a37-4f83-8d66-8afea64e2bc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1010.622499] env[69992]: DEBUG oslo_concurrency.lockutils [req-c1e62929-0b92-4d14-8f63-90a6a7e89c70 req-5127efc6-c10a-4959-a583-88512bef4558 service nova] Acquired lock "refresh_cache-2b1a0943-d59a-441d-a2e6-8149106803b6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1010.622695] env[69992]: DEBUG nova.network.neutron [req-c1e62929-0b92-4d14-8f63-90a6a7e89c70 req-5127efc6-c10a-4959-a583-88512bef4558 service nova] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Refreshing network info cache for port a50c02e4-9a37-4f83-8d66-8afea64e2bc5 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1010.623985] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:90:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4406a73e-2189-46ac-9e96-4f0af80b5094', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a50c02e4-9a37-4f83-8d66-8afea64e2bc5', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1010.636138] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1010.636309] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1010.641322] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1b37c8d-8c25-47cb-8669-0b32793c30cb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.664297] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897038, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.671224] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': task-2897040, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.672781] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1010.672781] env[69992]: value = "task-2897041" [ 1010.672781] env[69992]: _type = "Task" [ 1010.672781] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.684696] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897041, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.746163] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43eb8574-749b-45b7-893b-cf0d9848e3fd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.758368] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f613bb08-de54-4ee1-b9f8-c49d10db7501 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.793228] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-306f88aa-b5d4-498b-939b-ba8e7d26c8c5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.803091] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42861cb1-493c-45ac-9480-93de60aebb84 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.822297] env[69992]: DEBUG nova.compute.provider_tree [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1010.994731] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897039, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522984} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.995101] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 62936d27-5405-4d29-b3ff-c4d8a74ba440/62936d27-5405-4d29-b3ff-c4d8a74ba440.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1010.995204] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1010.995556] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ee0c3a23-488e-41e9-81e0-a200ac873e84 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.003346] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1011.003346] env[69992]: value = "task-2897042" [ 1011.003346] env[69992]: _type = "Task" [ 1011.003346] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.008157] env[69992]: DEBUG nova.compute.manager [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1011.016552] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897042, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.041861] env[69992]: DEBUG nova.virt.hardware [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1011.042251] env[69992]: DEBUG nova.virt.hardware [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1011.042394] env[69992]: DEBUG nova.virt.hardware [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1011.042599] env[69992]: DEBUG nova.virt.hardware [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1011.042770] env[69992]: DEBUG nova.virt.hardware [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1011.042940] env[69992]: DEBUG nova.virt.hardware [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1011.043180] env[69992]: DEBUG nova.virt.hardware [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1011.043345] env[69992]: DEBUG nova.virt.hardware [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1011.044607] 
env[69992]: DEBUG nova.virt.hardware [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1011.044607] env[69992]: DEBUG nova.virt.hardware [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1011.044607] env[69992]: DEBUG nova.virt.hardware [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1011.045307] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0251d24b-28f9-4a17-9e40-cfd4b30b9609 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.054156] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc28025-5be4-4aa7-8e79-1bfbf7aae523 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.083421] env[69992]: DEBUG nova.network.neutron [-] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.132809] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897038, 'name': CloneVM_Task} progress is 95%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.146235] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': task-2897040, 'name': ReconfigVM_Task, 'duration_secs': 0.714256} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.146693] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Reconfigured VM instance instance-00000029 to attach disk [datastore2] 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6/30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1011.147361] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-520e3d93-60ae-4ee9-babe-4e87c4e1b8e7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.155380] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Waiting for the task: (returnval){ [ 1011.155380] env[69992]: value = "task-2897043" [ 1011.155380] env[69992]: _type = "Task" [ 1011.155380] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.164958] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': task-2897043, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.183214] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897041, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.371037] env[69992]: DEBUG nova.scheduler.client.report [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 71 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1011.371352] env[69992]: DEBUG nova.compute.provider_tree [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 71 to 72 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1011.371538] env[69992]: DEBUG nova.compute.provider_tree [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1011.450569] env[69992]: DEBUG nova.network.neutron [req-c1e62929-0b92-4d14-8f63-90a6a7e89c70 req-5127efc6-c10a-4959-a583-88512bef4558 service nova] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Updated VIF entry in instance network info cache for port a50c02e4-9a37-4f83-8d66-8afea64e2bc5. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1011.450569] env[69992]: DEBUG nova.network.neutron [req-c1e62929-0b92-4d14-8f63-90a6a7e89c70 req-5127efc6-c10a-4959-a583-88512bef4558 service nova] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Updating instance_info_cache with network_info: [{"id": "a50c02e4-9a37-4f83-8d66-8afea64e2bc5", "address": "fa:16:3e:86:90:b6", "network": {"id": "bea180e9-720e-4be5-bb1d-8aa1243cfe3f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-67313604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "658cab8ee4194f7f98dd07de450f248b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa50c02e4-9a", "ovs_interfaceid": "a50c02e4-9a37-4f83-8d66-8afea64e2bc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.515076] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897042, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.155528} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.515362] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1011.518163] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa7476ef-339c-4d65-b2e7-4b29f1dd5f5a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.539379] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] 62936d27-5405-4d29-b3ff-c4d8a74ba440/62936d27-5405-4d29-b3ff-c4d8a74ba440.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1011.539863] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb8fda6b-6e9e-443f-ab83-2ff5bae39fb6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.560861] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1011.560861] env[69992]: value = "task-2897044" [ 1011.560861] env[69992]: _type = "Task" [ 1011.560861] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.570538] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897044, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.587176] env[69992]: INFO nova.compute.manager [-] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Took 1.30 seconds to deallocate network for instance. [ 1011.595840] env[69992]: DEBUG nova.network.neutron [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Successfully updated port: a8930976-0d99-4add-b5de-4f68e2761d75 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1011.629648] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897038, 'name': CloneVM_Task, 'duration_secs': 1.758457} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.629893] env[69992]: INFO nova.virt.vmwareapi.vmops [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Created linked-clone VM from snapshot [ 1011.630679] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67064c1b-9cf8-4f6f-9732-dd11e893e72b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.639280] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Uploading image f4723384-8c26-48b3-817e-be7849f27178 {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1011.664898] env[69992]: DEBUG oslo_vmware.rw_handles [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1011.664898] env[69992]: value = "vm-581950" [ 1011.664898] env[69992]: _type = "VirtualMachine" [ 1011.664898] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1011.665211] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-17fbac39-2869-423a-8222-71622a1e62a0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.669650] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': task-2897043, 'name': Rename_Task, 'duration_secs': 0.365818} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.672156] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1011.672156] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c4a1e790-caca-4726-aee7-e58f2e77117a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.679958] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Waiting for the task: (returnval){ [ 1011.679958] env[69992]: value = "task-2897046" [ 1011.679958] env[69992]: _type = "Task" [ 1011.679958] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.680221] env[69992]: DEBUG oslo_vmware.rw_handles [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lease: (returnval){ [ 1011.680221] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]525d118b-f94d-cd9d-a2bc-5a16a8af945a" [ 1011.680221] env[69992]: _type = "HttpNfcLease" [ 1011.680221] env[69992]: } obtained for exporting VM: (result){ [ 1011.680221] env[69992]: value = "vm-581950" [ 1011.680221] env[69992]: _type = "VirtualMachine" [ 1011.680221] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1011.680488] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the lease: (returnval){ [ 1011.680488] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]525d118b-f94d-cd9d-a2bc-5a16a8af945a" [ 1011.680488] env[69992]: _type = "HttpNfcLease" [ 1011.680488] env[69992]: } to be ready. {{(pid=69992) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1011.690183] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897041, 'name': CreateVM_Task, 'duration_secs': 0.588883} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.690905] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1011.691613] env[69992]: DEBUG oslo_concurrency.lockutils [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.691941] env[69992]: DEBUG oslo_concurrency.lockutils [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1011.692456] env[69992]: DEBUG oslo_concurrency.lockutils [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1011.695881] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e839e4e1-3be8-4f21-bef6-8912c7560bab {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.697919] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': task-2897046, 'name': 
PowerOnVM_Task} progress is 33%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.699427] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1011.699427] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]525d118b-f94d-cd9d-a2bc-5a16a8af945a" [ 1011.699427] env[69992]: _type = "HttpNfcLease" [ 1011.699427] env[69992]: } is initializing. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1011.703878] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1011.703878] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]524655b4-4683-59b5-dc62-795a918cb49d" [ 1011.703878] env[69992]: _type = "Task" [ 1011.703878] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.712705] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524655b4-4683-59b5-dc62-795a918cb49d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.800243] env[69992]: DEBUG nova.compute.manager [req-b455d21f-c94f-4526-a7a0-5e94584c9e04 req-0fba9c64-b1b7-47b3-9831-c7c2d602731f service nova] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Received event network-vif-deleted-0964ccc2-743e-4ab2-bbee-76f6b55f151e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1011.800445] env[69992]: DEBUG nova.compute.manager [req-b455d21f-c94f-4526-a7a0-5e94584c9e04 req-0fba9c64-b1b7-47b3-9831-c7c2d602731f service nova] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Received event network-vif-plugged-a8930976-0d99-4add-b5de-4f68e2761d75 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1011.801242] env[69992]: DEBUG oslo_concurrency.lockutils [req-b455d21f-c94f-4526-a7a0-5e94584c9e04 req-0fba9c64-b1b7-47b3-9831-c7c2d602731f service nova] Acquiring lock "714fafbf-a765-4e2c-8633-997d8244483c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.801242] env[69992]: DEBUG oslo_concurrency.lockutils [req-b455d21f-c94f-4526-a7a0-5e94584c9e04 req-0fba9c64-b1b7-47b3-9831-c7c2d602731f service nova] Lock "714fafbf-a765-4e2c-8633-997d8244483c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.801242] env[69992]: DEBUG oslo_concurrency.lockutils [req-b455d21f-c94f-4526-a7a0-5e94584c9e04 req-0fba9c64-b1b7-47b3-9831-c7c2d602731f service nova] Lock "714fafbf-a765-4e2c-8633-997d8244483c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.801242] env[69992]: DEBUG nova.compute.manager [req-b455d21f-c94f-4526-a7a0-5e94584c9e04 req-0fba9c64-b1b7-47b3-9831-c7c2d602731f service nova] 
[instance: 714fafbf-a765-4e2c-8633-997d8244483c] No waiting events found dispatching network-vif-plugged-a8930976-0d99-4add-b5de-4f68e2761d75 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1011.801440] env[69992]: WARNING nova.compute.manager [req-b455d21f-c94f-4526-a7a0-5e94584c9e04 req-0fba9c64-b1b7-47b3-9831-c7c2d602731f service nova] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Received unexpected event network-vif-plugged-a8930976-0d99-4add-b5de-4f68e2761d75 for instance with vm_state building and task_state spawning. [ 1011.802749] env[69992]: DEBUG nova.compute.manager [req-b455d21f-c94f-4526-a7a0-5e94584c9e04 req-0fba9c64-b1b7-47b3-9831-c7c2d602731f service nova] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Received event network-changed-a8930976-0d99-4add-b5de-4f68e2761d75 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1011.802749] env[69992]: DEBUG nova.compute.manager [req-b455d21f-c94f-4526-a7a0-5e94584c9e04 req-0fba9c64-b1b7-47b3-9831-c7c2d602731f service nova] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Refreshing instance network info cache due to event network-changed-a8930976-0d99-4add-b5de-4f68e2761d75. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1011.802749] env[69992]: DEBUG oslo_concurrency.lockutils [req-b455d21f-c94f-4526-a7a0-5e94584c9e04 req-0fba9c64-b1b7-47b3-9831-c7c2d602731f service nova] Acquiring lock "refresh_cache-714fafbf-a765-4e2c-8633-997d8244483c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.802749] env[69992]: DEBUG oslo_concurrency.lockutils [req-b455d21f-c94f-4526-a7a0-5e94584c9e04 req-0fba9c64-b1b7-47b3-9831-c7c2d602731f service nova] Acquired lock "refresh_cache-714fafbf-a765-4e2c-8633-997d8244483c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1011.802749] env[69992]: DEBUG nova.network.neutron [req-b455d21f-c94f-4526-a7a0-5e94584c9e04 req-0fba9c64-b1b7-47b3-9831-c7c2d602731f service nova] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Refreshing network info cache for port a8930976-0d99-4add-b5de-4f68e2761d75 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1011.881021] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.903s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.881021] env[69992]: DEBUG nova.compute.manager [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1011.882133] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.228s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.882356] env[69992]: DEBUG nova.objects.instance [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lazy-loading 'resources' on Instance uuid ee4c0f2b-44cb-4b37-8e4a-5706b9932144 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1011.956951] env[69992]: DEBUG oslo_concurrency.lockutils [req-c1e62929-0b92-4d14-8f63-90a6a7e89c70 req-5127efc6-c10a-4959-a583-88512bef4558 service nova] Releasing lock "refresh_cache-2b1a0943-d59a-441d-a2e6-8149106803b6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1012.073933] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897044, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.097200] env[69992]: DEBUG oslo_concurrency.lockutils [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.097720] env[69992]: DEBUG oslo_concurrency.lockutils [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "refresh_cache-714fafbf-a765-4e2c-8633-997d8244483c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.193300] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1012.193300] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]525d118b-f94d-cd9d-a2bc-5a16a8af945a" [ 1012.193300] env[69992]: _type = "HttpNfcLease" [ 1012.193300] env[69992]: } is ready. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1012.196601] env[69992]: DEBUG oslo_vmware.rw_handles [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1012.196601] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]525d118b-f94d-cd9d-a2bc-5a16a8af945a" [ 1012.196601] env[69992]: _type = "HttpNfcLease" [ 1012.196601] env[69992]: }. 
{{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1012.196892] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': task-2897046, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.197644] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e6c4d6-e577-4dae-9439-491ced787f4e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.206604] env[69992]: DEBUG oslo_vmware.rw_handles [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a8e638-8952-12e9-2ba6-0652b990e531/disk-0.vmdk from lease info. {{(pid=69992) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1012.206787] env[69992]: DEBUG oslo_vmware.rw_handles [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a8e638-8952-12e9-2ba6-0652b990e531/disk-0.vmdk for reading. {{(pid=69992) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1012.274401] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524655b4-4683-59b5-dc62-795a918cb49d, 'name': SearchDatastore_Task, 'duration_secs': 0.010875} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.275885] env[69992]: DEBUG oslo_concurrency.lockutils [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1012.276152] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1012.276586] env[69992]: DEBUG oslo_concurrency.lockutils [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.276635] env[69992]: DEBUG oslo_concurrency.lockutils [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1012.276849] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1012.277482] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9fce108b-02e9-4cd4-9c31-3b9f7b78e31b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.286904] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1012.287102] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1012.287897] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c392032d-4b7e-46c1-8841-4e9c03dcbf96 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.294640] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1012.294640] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]526dbacd-0d6f-7ebd-611e-781cdf533367" [ 1012.294640] env[69992]: _type = "Task" [ 1012.294640] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.302957] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526dbacd-0d6f-7ebd-611e-781cdf533367, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.311496] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d357a115-89a9-4c13-94f4-284a09222763 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.338194] env[69992]: DEBUG nova.network.neutron [req-b455d21f-c94f-4526-a7a0-5e94584c9e04 req-0fba9c64-b1b7-47b3-9831-c7c2d602731f service nova] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1012.385755] env[69992]: DEBUG nova.compute.utils [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1012.388316] env[69992]: DEBUG nova.compute.manager [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1012.388316] env[69992]: DEBUG nova.network.neutron [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1012.472302] env[69992]: DEBUG nova.network.neutron [req-b455d21f-c94f-4526-a7a0-5e94584c9e04 req-0fba9c64-b1b7-47b3-9831-c7c2d602731f service nova] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.477225] env[69992]: DEBUG nova.policy [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '461a16451840440a86fa85e586077d52', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bb75ccd37b3415a8837de260e0886c3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1012.570807] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897044, 'name': ReconfigVM_Task, 'duration_secs': 0.713947} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.573656] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Reconfigured VM instance instance-0000002a to attach disk [datastore2] 62936d27-5405-4d29-b3ff-c4d8a74ba440/62936d27-5405-4d29-b3ff-c4d8a74ba440.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1012.574561] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4b6f2cd3-cd75-4e59-a7e6-4c06b47e8c7f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.581201] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1012.581201] env[69992]: value = "task-2897047" [ 1012.581201] env[69992]: _type = "Task" [ 1012.581201] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.592317] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897047, 'name': Rename_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.694892] env[69992]: DEBUG oslo_vmware.api [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': task-2897046, 'name': PowerOnVM_Task, 'duration_secs': 0.725213} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.695261] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1012.696099] env[69992]: INFO nova.compute.manager [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Took 10.08 seconds to spawn the instance on the hypervisor. [ 1012.696099] env[69992]: DEBUG nova.compute.manager [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1012.696870] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fba42c4-6f83-4fe0-bd20-4185a3d14d2b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.809680] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526dbacd-0d6f-7ebd-611e-781cdf533367, 'name': SearchDatastore_Task, 'duration_secs': 0.010415} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.810586] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80f18ad6-18d7-419c-ae39-92073a91dde9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.817285] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1012.817285] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]525b626d-70ab-bd83-41d7-514b0b42f81b" [ 1012.817285] env[69992]: _type = "Task" [ 1012.817285] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.826441] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525b626d-70ab-bd83-41d7-514b0b42f81b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.859695] env[69992]: DEBUG nova.network.neutron [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Successfully created port: e8395fdf-5e7f-47d5-9385-6cb5a2090486 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1012.899292] env[69992]: DEBUG nova.compute.manager [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1012.975803] env[69992]: DEBUG oslo_concurrency.lockutils [req-b455d21f-c94f-4526-a7a0-5e94584c9e04 req-0fba9c64-b1b7-47b3-9831-c7c2d602731f service nova] Releasing lock "refresh_cache-714fafbf-a765-4e2c-8633-997d8244483c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1012.975803] env[69992]: DEBUG oslo_concurrency.lockutils [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "refresh_cache-714fafbf-a765-4e2c-8633-997d8244483c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1012.975803] env[69992]: DEBUG nova.network.neutron [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1013.069472] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0814e2be-9456-43d3-9bcf-9b299b02b48b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.082793] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa02da0-9079-4cb8-92a9-c99d7727cfeb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.093727] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897047, 'name': Rename_Task, 'duration_secs': 0.205341} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.125874] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1013.126763] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ac5966c8-4e20-470c-b172-8ec2eff0c5ae {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.130486] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af794598-dd10-4495-8d12-b85eb25581ee {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.139009] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad8a082e-14b3-45d2-a059-47d6213b146c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.145501] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1013.145501] env[69992]: value = "task-2897048" [ 1013.145501] env[69992]: _type = "Task" [ 1013.145501] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.165829] env[69992]: DEBUG nova.compute.provider_tree [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1013.172059] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897048, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.218489] env[69992]: INFO nova.compute.manager [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Took 58.90 seconds to build instance. [ 1013.333417] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525b626d-70ab-bd83-41d7-514b0b42f81b, 'name': SearchDatastore_Task, 'duration_secs': 0.010816} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.333417] env[69992]: DEBUG oslo_concurrency.lockutils [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1013.333783] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 2b1a0943-d59a-441d-a2e6-8149106803b6/2b1a0943-d59a-441d-a2e6-8149106803b6.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1013.334600] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5fb7e02a-8398-46bb-8ca8-45a9985dac9b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.342156] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1013.342156] env[69992]: value = "task-2897049" [ 1013.342156] env[69992]: _type = "Task" [ 1013.342156] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.350507] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897049, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.513040] env[69992]: DEBUG nova.network.neutron [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1013.661364] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897048, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.663915] env[69992]: DEBUG nova.network.neutron [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Updating instance_info_cache with network_info: [{"id": "a8930976-0d99-4add-b5de-4f68e2761d75", "address": "fa:16:3e:cc:4d:cc", "network": {"id": "0655b1a1-e1ee-4efd-889c-0f72915310f5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1445509008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51b8195c4e7418cbdaa66aa5e5aff5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8930976-0d", "ovs_interfaceid": "a8930976-0d99-4add-b5de-4f68e2761d75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.711869] env[69992]: DEBUG nova.scheduler.client.report [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 72 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1013.712246] env[69992]: DEBUG nova.compute.provider_tree [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 72 to 73 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1013.712445] env[69992]: DEBUG nova.compute.provider_tree [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1013.720109] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77a53de3-7a76-45e4-93f7-81c31195470d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Lock "30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.581s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.853146] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897049, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504475} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.854045] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 2b1a0943-d59a-441d-a2e6-8149106803b6/2b1a0943-d59a-441d-a2e6-8149106803b6.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1013.854370] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1013.855180] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e6399788-bb55-44ba-a08e-360aff49df97 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.862074] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1013.862074] env[69992]: value = "task-2897050" [ 1013.862074] env[69992]: _type = "Task" [ 1013.862074] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.873750] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897050, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.912433] env[69992]: DEBUG nova.compute.manager [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1013.938677] env[69992]: DEBUG nova.virt.hardware [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1013.938927] env[69992]: DEBUG nova.virt.hardware [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1013.939110] env[69992]: DEBUG nova.virt.hardware [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1013.939344] env[69992]: DEBUG nova.virt.hardware [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1013.939509] env[69992]: DEBUG nova.virt.hardware [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1013.940708] env[69992]: DEBUG nova.virt.hardware [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1013.941146] env[69992]: DEBUG nova.virt.hardware [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1013.943173] env[69992]: DEBUG nova.virt.hardware [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1013.943450] env[69992]: DEBUG nova.virt.hardware [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1013.943702] env[69992]: DEBUG nova.virt.hardware [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1013.943939] env[69992]: DEBUG nova.virt.hardware [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1013.945176] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cfde0a2-cbba-4b22-b55c-a005c3128f4e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.954544] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce0c7e4-8ee6-4d01-b5c7-bec9dc0ec5f3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.157373] env[69992]: DEBUG oslo_vmware.api [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897048, 'name': PowerOnVM_Task, 'duration_secs': 0.898197} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.157776] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1014.158356] env[69992]: INFO nova.compute.manager [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Took 8.74 seconds to spawn the instance on the hypervisor. 
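[editor's note] The PowerOnVM_Task, CopyVirtualDisk_Task and ExtendVirtualDisk_Task entries above all follow the same shape: the driver submits a vCenter task, then the API layer polls it (the repeated "_poll_task ... progress is N%" lines) until it reports "completed successfully" with a duration. The following is a minimal standalone sketch of that poll-until-done pattern, not the oslo.vmware implementation; get_task_info, TaskFailed and the interval/timeout values are assumptions made for illustration.

# Illustrative sketch only -- mimics the poll loop visible in the log above.
import time


class TaskFailed(Exception):
    """Raised when the polled task reports an error state (hypothetical)."""


def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
    """Poll a vCenter-style task until it finishes.

    get_task_info is a caller-supplied callable (an assumption for this
    sketch) returning an object with .state ('running', 'success', 'error')
    and .progress (0-100), similar to what the log reports for
    PowerOnVM_Task / CopyVirtualDisk_Task.
    """
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise TaskFailed(getattr(info, "error", "task failed"))
        # Mirrors the repeated "Task: {...} progress is N%." DEBUG lines.
        print(f"task progress is {info.progress}%")
        if time.monotonic() > deadline:
            raise TimeoutError("task did not complete in time")
        time.sleep(poll_interval)

The real service additionally serializes session creation and re-login behind a lock (as the oslo_concurrency.lockutils lines show), but the per-task flow reduces to this submit-then-poll loop.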
[ 1014.159541] env[69992]: DEBUG nova.compute.manager [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1014.159541] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ea1654-49e4-4060-aef9-44dcf9a8ae2e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.173275] env[69992]: DEBUG oslo_concurrency.lockutils [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "refresh_cache-714fafbf-a765-4e2c-8633-997d8244483c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1014.174459] env[69992]: DEBUG nova.compute.manager [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Instance network_info: |[{"id": "a8930976-0d99-4add-b5de-4f68e2761d75", "address": "fa:16:3e:cc:4d:cc", "network": {"id": "0655b1a1-e1ee-4efd-889c-0f72915310f5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1445509008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51b8195c4e7418cbdaa66aa5e5aff5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8930976-0d", "ovs_interfaceid": "a8930976-0d99-4add-b5de-4f68e2761d75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1014.174559] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:4d:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a8930976-0d99-4add-b5de-4f68e2761d75', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1014.184384] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1014.184807] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1014.184807] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-239dd346-c710-4670-93bc-527953652c17 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.210966] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1014.210966] env[69992]: value = "task-2897051" [ 1014.210966] env[69992]: _type = "Task" [ 1014.210966] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.223590] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.341s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.227151] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897051, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.227792] env[69992]: DEBUG oslo_concurrency.lockutils [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.356s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1014.230250] env[69992]: INFO nova.compute.claims [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1014.234021] env[69992]: DEBUG nova.compute.manager [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1014.265795] env[69992]: INFO nova.scheduler.client.report [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Deleted allocations for instance ee4c0f2b-44cb-4b37-8e4a-5706b9932144 [ 1014.373567] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897050, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070121} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.374123] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1014.375113] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c849d9-79da-403d-8812-514bbf3baa73 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.401922] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] 2b1a0943-d59a-441d-a2e6-8149106803b6/2b1a0943-d59a-441d-a2e6-8149106803b6.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1014.402290] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a878b69-8ff7-41a6-8054-6b8a9e657721 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.429186] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1014.429186] env[69992]: value = "task-2897052" [ 1014.429186] env[69992]: _type = "Task" [ 1014.429186] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.440564] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897052, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.524438] env[69992]: DEBUG nova.network.neutron [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Successfully updated port: e8395fdf-5e7f-47d5-9385-6cb5a2090486 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1014.686154] env[69992]: INFO nova.compute.manager [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Took 55.08 seconds to build instance. [ 1014.724701] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897051, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.769316] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.777922] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ab9f941-5191-4f32-bee9-9f61e9fe6026 tempest-FloatingIPsAssociationTestJSON-895309651 tempest-FloatingIPsAssociationTestJSON-895309651-project-member] Lock "ee4c0f2b-44cb-4b37-8e4a-5706b9932144" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.876s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.797031] env[69992]: DEBUG nova.compute.manager [req-ef8163c6-c3ad-4f66-8433-96818bb6772b req-b4c8ff61-65b0-4738-bcc8-258966751277 service nova] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Received event network-vif-plugged-e8395fdf-5e7f-47d5-9385-6cb5a2090486 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1014.797031] env[69992]: DEBUG oslo_concurrency.lockutils [req-ef8163c6-c3ad-4f66-8433-96818bb6772b req-b4c8ff61-65b0-4738-bcc8-258966751277 service nova] Acquiring lock "b7a1b9e1-4d57-435f-bdb6-51481968aacb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.797031] env[69992]: DEBUG oslo_concurrency.lockutils [req-ef8163c6-c3ad-4f66-8433-96818bb6772b req-b4c8ff61-65b0-4738-bcc8-258966751277 service nova] Lock "b7a1b9e1-4d57-435f-bdb6-51481968aacb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1014.797031] env[69992]: DEBUG oslo_concurrency.lockutils [req-ef8163c6-c3ad-4f66-8433-96818bb6772b req-b4c8ff61-65b0-4738-bcc8-258966751277 service nova] Lock "b7a1b9e1-4d57-435f-bdb6-51481968aacb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.797332] env[69992]: DEBUG nova.compute.manager [req-ef8163c6-c3ad-4f66-8433-96818bb6772b req-b4c8ff61-65b0-4738-bcc8-258966751277 service nova] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] No waiting events found dispatching network-vif-plugged-e8395fdf-5e7f-47d5-9385-6cb5a2090486 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1014.797483] env[69992]: WARNING nova.compute.manager [req-ef8163c6-c3ad-4f66-8433-96818bb6772b req-b4c8ff61-65b0-4738-bcc8-258966751277 service nova] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Received unexpected event network-vif-plugged-e8395fdf-5e7f-47d5-9385-6cb5a2090486 for instance with vm_state building and task_state spawning. 
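[editor's note] The block above traces Nova's external-event handshake from the Neutron side: the service user posts network-vif-plugged-e8395fdf..., nova-compute takes the per-instance "-events" lock, finds no registered waiter ("No waiting events found dispatching ..."), and logs the WARNING about an unexpected event because the instance is still in vm_state building. Below is a minimal sketch of that register-then-pop pattern using plain threading primitives; the class and method names are loosely modelled on the ones visible in the log and are assumptions for illustration, not Nova's actual InstanceEvents API.

# Illustrative sketch only -- not Nova's InstanceEvents implementation.
import threading


class InstanceEventsSketch:
    def __init__(self):
        self._lock = threading.Lock()   # stands in for the "<uuid>-events" lock
        self._waiters = {}              # (instance, event_name) -> threading.Event

    def prepare_for_event(self, instance, event_name):
        """Register interest in an event before triggering the external action."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance, event_name)] = waiter
        return waiter

    def pop_instance_event(self, instance, event_name):
        """Called when the external event arrives (e.g. from Neutron)."""
        with self._lock:
            waiter = self._waiters.pop((instance, event_name), None)
        if waiter is None:
            # No one was waiting yet -- matches "Received unexpected event ..."
            print(f"unexpected event {event_name} for instance {instance}")
            return False
        waiter.set()
        return True


events = InstanceEventsSketch()

# Case seen in the log: the notification arrives before a waiter is
# registered, so it is reported as unexpected.
events.pop_instance_event("b7a1b9e1", "network-vif-plugged-e8395fdf")

# Normal case: register first, then the arriving event releases the waiter.
w = events.prepare_for_event("b7a1b9e1", "network-vif-plugged-e8395fdf")
events.pop_instance_event("b7a1b9e1", "network-vif-plugged-e8395fdf")
assert w.wait(timeout=1)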
[ 1014.948168] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897052, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.033642] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquiring lock "refresh_cache-b7a1b9e1-4d57-435f-bdb6-51481968aacb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.033642] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquired lock "refresh_cache-b7a1b9e1-4d57-435f-bdb6-51481968aacb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.033642] env[69992]: DEBUG nova.network.neutron [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1015.097415] env[69992]: DEBUG nova.compute.manager [req-bc9e3040-b634-4860-bfe8-4786311c92ab req-f7ca00b3-40eb-4be3-994d-37e4c9685673 service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Received event network-changed-abab8d85-8633-4722-85d1-b21be464919d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1015.097415] env[69992]: DEBUG nova.compute.manager [req-bc9e3040-b634-4860-bfe8-4786311c92ab req-f7ca00b3-40eb-4be3-994d-37e4c9685673 service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Refreshing instance network info cache due to event network-changed-abab8d85-8633-4722-85d1-b21be464919d. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1015.098200] env[69992]: DEBUG oslo_concurrency.lockutils [req-bc9e3040-b634-4860-bfe8-4786311c92ab req-f7ca00b3-40eb-4be3-994d-37e4c9685673 service nova] Acquiring lock "refresh_cache-30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.098638] env[69992]: DEBUG oslo_concurrency.lockutils [req-bc9e3040-b634-4860-bfe8-4786311c92ab req-f7ca00b3-40eb-4be3-994d-37e4c9685673 service nova] Acquired lock "refresh_cache-30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.100385] env[69992]: DEBUG nova.network.neutron [req-bc9e3040-b634-4860-bfe8-4786311c92ab req-f7ca00b3-40eb-4be3-994d-37e4c9685673 service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Refreshing network info cache for port abab8d85-8633-4722-85d1-b21be464919d {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1015.186703] env[69992]: DEBUG nova.compute.manager [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1015.188044] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a53c84-b6f2-48a5-8e40-3c926a4f782e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.193592] env[69992]: DEBUG oslo_concurrency.lockutils [None req-19c45c8c-dd01-40d7-a1cc-6521c097ae01 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "62936d27-5405-4d29-b3ff-c4d8a74ba440" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.278s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.227937] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897051, 'name': CreateVM_Task, 'duration_secs': 0.589887} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.228307] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1015.229253] env[69992]: DEBUG oslo_concurrency.lockutils [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.230792] env[69992]: DEBUG oslo_concurrency.lockutils [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.231223] env[69992]: DEBUG oslo_concurrency.lockutils [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1015.231906] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d22b6157-4da6-4262-8e5e-d839ef41b0e0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.239563] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1015.239563] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52fd625f-4ce1-e322-8e4a-96e27b302943" [ 1015.239563] env[69992]: _type = "Task" [ 1015.239563] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.253762] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52fd625f-4ce1-e322-8e4a-96e27b302943, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.440828] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897052, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.590396] env[69992]: DEBUG nova.network.neutron [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1015.694895] env[69992]: DEBUG nova.compute.manager [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1015.701097] env[69992]: INFO nova.compute.manager [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] instance snapshotting [ 1015.701758] env[69992]: DEBUG nova.objects.instance [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lazy-loading 'flavor' on Instance uuid a7f01cd7-f148-48fc-a71a-5461672d6039 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1015.761289] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52fd625f-4ce1-e322-8e4a-96e27b302943, 'name': SearchDatastore_Task, 'duration_secs': 0.019326} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.761763] env[69992]: DEBUG oslo_concurrency.lockutils [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1015.762139] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1015.762420] env[69992]: DEBUG oslo_concurrency.lockutils [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.762617] env[69992]: DEBUG oslo_concurrency.lockutils [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.762833] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1015.765848] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f8f9293a-855b-42bb-9558-046f369fd120 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.775578] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1015.775772] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1015.779306] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f01f7c6b-294c-4815-8027-da277744645a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.791354] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1015.791354] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52dffac4-cf1e-dd79-f80a-45e3f574d2b5" [ 1015.791354] env[69992]: _type = "Task" [ 1015.791354] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.806202] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52dffac4-cf1e-dd79-f80a-45e3f574d2b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.948832] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897052, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.959566] env[69992]: DEBUG nova.network.neutron [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Updating instance_info_cache with network_info: [{"id": "e8395fdf-5e7f-47d5-9385-6cb5a2090486", "address": "fa:16:3e:15:2a:70", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.137", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8395fdf-5e", "ovs_interfaceid": "e8395fdf-5e7f-47d5-9385-6cb5a2090486", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.965951] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63078bca-fa22-42ee-9560-82310c3e0977 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.976693] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9abdb33e-a321-4a7c-ad50-e143b54d242e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.033188] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7070ef8c-c371-4975-9768-a668ce8002ae {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.042409] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d1df38-17db-43b4-a48d-7e942d534f2e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.061129] env[69992]: DEBUG nova.compute.provider_tree [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1016.119149] env[69992]: DEBUG nova.network.neutron 
[req-bc9e3040-b634-4860-bfe8-4786311c92ab req-f7ca00b3-40eb-4be3-994d-37e4c9685673 service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Updated VIF entry in instance network info cache for port abab8d85-8633-4722-85d1-b21be464919d. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1016.119609] env[69992]: DEBUG nova.network.neutron [req-bc9e3040-b634-4860-bfe8-4786311c92ab req-f7ca00b3-40eb-4be3-994d-37e4c9685673 service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Updating instance_info_cache with network_info: [{"id": "abab8d85-8633-4722-85d1-b21be464919d", "address": "fa:16:3e:65:03:5b", "network": {"id": "adeff25c-7ce7-4915-aa2a-82f338cf74ca", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-834451172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef82945e1f93479ea4a19fbe1855870b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabab8d85-86", "ovs_interfaceid": "abab8d85-8633-4722-85d1-b21be464919d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.208975] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c30997-9ccc-406c-a234-1266ac713920 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.235059] env[69992]: DEBUG oslo_concurrency.lockutils [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.235876] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca84c2c5-0922-4759-bc2b-8412e72145ea {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.302541] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52dffac4-cf1e-dd79-f80a-45e3f574d2b5, 'name': SearchDatastore_Task, 'duration_secs': 0.016833} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.303639] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-557f8a46-36f1-4fe6-8b08-964ad5e4b58e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.309468] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1016.309468] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]526d8eba-2e89-5f5e-e018-e02e5c317f6d" [ 1016.309468] env[69992]: _type = "Task" [ 1016.309468] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.318569] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526d8eba-2e89-5f5e-e018-e02e5c317f6d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.393560] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "62936d27-5405-4d29-b3ff-c4d8a74ba440" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.393886] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "62936d27-5405-4d29-b3ff-c4d8a74ba440" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1016.395101] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "62936d27-5405-4d29-b3ff-c4d8a74ba440-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.395101] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "62936d27-5405-4d29-b3ff-c4d8a74ba440-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1016.395101] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "62936d27-5405-4d29-b3ff-c4d8a74ba440-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s
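[editor's note] The 'Acquiring lock ... / acquired ... / "released" ...' DEBUG entries above come from oslo.concurrency's lockutils. A minimal sketch of that pattern, assuming only the public lockutils API; the function and lock names below are illustrative stand-ins, not Nova code:

    # Illustrative sketch of the lockutils pattern behind the lock
    # acquire/release DEBUG lines above. Names are stand-ins, not Nova's.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # Runs with the "compute_resources" lock held; lockutils logs the
        # acquire/release pair around this call.
        pass

    def terminate(instance_uuid):
        # Context-manager form, as used for the per-instance lock and the
        # per-instance "-events" lock seen above.
        with lockutils.lock(instance_uuid):
            with lockutils.lock(instance_uuid + '-events'):
                pass  # clear pending events, then continue the teardown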
{{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1016.396670] env[69992]: INFO nova.compute.manager [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Terminating instance [ 1016.443317] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897052, 'name': ReconfigVM_Task, 'duration_secs': 1.529199} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.444346] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Reconfigured VM instance instance-0000002b to attach disk [datastore2] 2b1a0943-d59a-441d-a2e6-8149106803b6/2b1a0943-d59a-441d-a2e6-8149106803b6.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1016.444346] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4dd6e38b-2caa-45b0-94a0-6d6cf28f6f71 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.452104] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1016.452104] env[69992]: value = "task-2897053" [ 1016.452104] env[69992]: _type = "Task" [ 1016.452104] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.461193] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897053, 'name': Rename_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.461622] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Releasing lock "refresh_cache-b7a1b9e1-4d57-435f-bdb6-51481968aacb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1016.461935] env[69992]: DEBUG nova.compute.manager [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Instance network_info: |[{"id": "e8395fdf-5e7f-47d5-9385-6cb5a2090486", "address": "fa:16:3e:15:2a:70", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.137", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8395fdf-5e", "ovs_interfaceid": "e8395fdf-5e7f-47d5-9385-6cb5a2090486", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1016.462396] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:2a:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '07e9bef1-2b0e-4e4d-997f-de71bb0e213a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e8395fdf-5e7f-47d5-9385-6cb5a2090486', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1016.470761] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Creating folder: Project (4bb75ccd37b3415a8837de260e0886c3). Parent ref: group-v581821. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1016.471907] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b7f5997c-67f5-45e6-9ef7-10249ef009dd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.486166] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Created folder: Project (4bb75ccd37b3415a8837de260e0886c3) in parent group-v581821. [ 1016.486384] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Creating folder: Instances. Parent ref: group-v581953. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1016.486639] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6692acff-623f-4d30-91c2-ad2b77580f28 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.496711] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Created folder: Instances in parent group-v581953. [ 1016.496970] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1016.497193] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1016.497406] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6ddf014d-98e6-4d94-b60d-c28d6aa5423a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.519531] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1016.519531] env[69992]: value = "task-2897056" [ 1016.519531] env[69992]: _type = "Task" [ 1016.519531] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.527458] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897056, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.604555] env[69992]: DEBUG nova.scheduler.client.report [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 73 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1016.604828] env[69992]: DEBUG nova.compute.provider_tree [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 73 to 74 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1016.605074] env[69992]: DEBUG nova.compute.provider_tree [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1016.622365] env[69992]: DEBUG oslo_concurrency.lockutils [req-bc9e3040-b634-4860-bfe8-4786311c92ab req-f7ca00b3-40eb-4be3-994d-37e4c9685673 service nova] Releasing lock "refresh_cache-30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1016.752556] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Creating Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1016.753278] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-176ba2cc-d66b-4853-b3ab-12696cdac687 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.762375] env[69992]: DEBUG oslo_vmware.api [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1016.762375] env[69992]: value = "task-2897057" [ 1016.762375] env[69992]: _type = "Task" [ 1016.762375] env[69992]: } to complete. 
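[editor's note] For reference, the VCPU/MEMORY_MB/DISK_GB inventory reported above for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 implies the following usable capacity, assuming Placement's usual capacity formula (total - reserved) * allocation_ratio:

    # Usable capacity implied by the inventory logged above, assuming
    # Placement computes capacity as (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0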
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.774413] env[69992]: DEBUG oslo_vmware.api [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897057, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.821170] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526d8eba-2e89-5f5e-e018-e02e5c317f6d, 'name': SearchDatastore_Task, 'duration_secs': 0.010263} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.821468] env[69992]: DEBUG oslo_concurrency.lockutils [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1016.821723] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 714fafbf-a765-4e2c-8633-997d8244483c/714fafbf-a765-4e2c-8633-997d8244483c.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1016.822018] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1eefb807-b750-4573-a2e4-92045fcef8a1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.831073] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1016.831073] env[69992]: value = "task-2897058" [ 1016.831073] env[69992]: _type = "Task" [ 1016.831073] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.842480] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897058, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.900720] env[69992]: DEBUG nova.compute.manager [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1016.901127] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1016.902341] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a9a980d-a0da-4cf7-85b3-94348ee2a86d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.913749] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1016.915505] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d2d69f5-b180-44ef-ab45-1dd632121637 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.921822] env[69992]: DEBUG nova.compute.manager [req-443e7c8f-60a3-4cd3-9a3f-a9e5fa7ed46e req-f46c7dfd-286e-419f-b171-22f0432374d3 service nova] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Received event network-changed-e8395fdf-5e7f-47d5-9385-6cb5a2090486 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1016.922458] env[69992]: DEBUG nova.compute.manager [req-443e7c8f-60a3-4cd3-9a3f-a9e5fa7ed46e req-f46c7dfd-286e-419f-b171-22f0432374d3 service nova] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Refreshing instance network info cache due to event network-changed-e8395fdf-5e7f-47d5-9385-6cb5a2090486. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1016.922458] env[69992]: DEBUG oslo_concurrency.lockutils [req-443e7c8f-60a3-4cd3-9a3f-a9e5fa7ed46e req-f46c7dfd-286e-419f-b171-22f0432374d3 service nova] Acquiring lock "refresh_cache-b7a1b9e1-4d57-435f-bdb6-51481968aacb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.922567] env[69992]: DEBUG oslo_concurrency.lockutils [req-443e7c8f-60a3-4cd3-9a3f-a9e5fa7ed46e req-f46c7dfd-286e-419f-b171-22f0432374d3 service nova] Acquired lock "refresh_cache-b7a1b9e1-4d57-435f-bdb6-51481968aacb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1016.922749] env[69992]: DEBUG nova.network.neutron [req-443e7c8f-60a3-4cd3-9a3f-a9e5fa7ed46e req-f46c7dfd-286e-419f-b171-22f0432374d3 service nova] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Refreshing network info cache for port e8395fdf-5e7f-47d5-9385-6cb5a2090486 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1016.926587] env[69992]: DEBUG oslo_vmware.api [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1016.926587] env[69992]: value = "task-2897059" [ 1016.926587] env[69992]: _type = "Task" [ 1016.926587] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.940561] env[69992]: DEBUG oslo_vmware.api [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897059, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.967078] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897053, 'name': Rename_Task, 'duration_secs': 0.271864} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.967588] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1016.967944] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-32348e10-5d2d-4f60-9f19-5da330284c58 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.978390] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1016.978390] env[69992]: value = "task-2897060" [ 1016.978390] env[69992]: _type = "Task" [ 1016.978390] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.990940] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897060, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.034313] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897056, 'name': CreateVM_Task, 'duration_secs': 0.339809} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.034495] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1017.035606] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.035911] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.036523] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1017.036982] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11e53ef2-fd01-4425-b542-4c76af6a4f66 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.046118] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for the task: (returnval){ [ 1017.046118] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528c1983-d0a0-c55a-6476-144b0e8471e6" [ 1017.046118] env[69992]: _type = "Task" [ 1017.046118] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.058454] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528c1983-d0a0-c55a-6476-144b0e8471e6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.113339] env[69992]: DEBUG oslo_concurrency.lockutils [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.884s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.113339] env[69992]: DEBUG nova.compute.manager [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1017.116208] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.775s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.116475] env[69992]: DEBUG nova.objects.instance [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lazy-loading 'resources' on Instance uuid 97cb6372-3f4e-427d-9509-7e6c43aa2e7b {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1017.273348] env[69992]: DEBUG oslo_vmware.api [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897057, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.342292] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897058, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.438936] env[69992]: DEBUG oslo_vmware.api [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897059, 'name': PowerOffVM_Task, 'duration_secs': 0.26586} completed successfully. 
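[editor's note] The recurring "Waiting for the task ... progress is N% ... completed successfully" triplets are oslo.vmware's task polling. A sketch of driving a vSphere task such as PowerOffVM_Task that way, assuming only the public VMwareAPISession API; the host, credentials and moref value are placeholders:

    # Sketch of the oslo.vmware task-polling pattern behind the
    # "Waiting for the task ... progress is N% ... completed successfully"
    # lines. Host, credentials and the moref value are placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # placeholder
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)  # polls TaskInfo until it succeeds or raises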
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.439704] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1017.439704] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1017.439704] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a44563b5-a598-4d51-a2e3-66dee940883a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.489205] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897060, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.503849] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1017.504898] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1017.504898] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Deleting the datastore file [datastore2] 62936d27-5405-4d29-b3ff-c4d8a74ba440 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1017.504898] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b21285b-ec55-430e-b650-c18bf24bebf5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.510986] env[69992]: DEBUG oslo_vmware.api [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1017.510986] env[69992]: value = "task-2897062" [ 1017.510986] env[69992]: _type = "Task" [ 1017.510986] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.522417] env[69992]: DEBUG oslo_vmware.api [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897062, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.557950] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528c1983-d0a0-c55a-6476-144b0e8471e6, 'name': SearchDatastore_Task, 'duration_secs': 0.055407} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.558379] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.558731] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1017.559163] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.559382] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.559624] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1017.560057] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e249464b-3603-4a29-aee4-23f739e286c4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.569324] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1017.569510] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1017.570379] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6b025c4-e5d8-4c24-baad-bb86734165fe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.578925] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for the task: (returnval){ [ 1017.578925] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]524f329a-eda8-ba38-3784-b391a3921987" [ 1017.578925] env[69992]: _type = "Task" [ 1017.578925] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.590576] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524f329a-eda8-ba38-3784-b391a3921987, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.626055] env[69992]: DEBUG nova.compute.utils [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1017.627881] env[69992]: DEBUG nova.compute.manager [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1017.628080] env[69992]: DEBUG nova.network.neutron [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1017.703906] env[69992]: DEBUG nova.network.neutron [req-443e7c8f-60a3-4cd3-9a3f-a9e5fa7ed46e req-f46c7dfd-286e-419f-b171-22f0432374d3 service nova] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Updated VIF entry in instance network info cache for port e8395fdf-5e7f-47d5-9385-6cb5a2090486. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1017.704281] env[69992]: DEBUG nova.network.neutron [req-443e7c8f-60a3-4cd3-9a3f-a9e5fa7ed46e req-f46c7dfd-286e-419f-b171-22f0432374d3 service nova] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Updating instance_info_cache with network_info: [{"id": "e8395fdf-5e7f-47d5-9385-6cb5a2090486", "address": "fa:16:3e:15:2a:70", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.137", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8395fdf-5e", "ovs_interfaceid": "e8395fdf-5e7f-47d5-9385-6cb5a2090486", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.709192] env[69992]: DEBUG nova.policy [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '99d0f7e4d6f94f1da01ba71cb88d8c73', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3b2281c7442a4ab798e2581de5f8cdce', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1017.781522] env[69992]: DEBUG oslo_vmware.api [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897057, 'name': CreateSnapshot_Task, 'duration_secs': 0.856318} completed successfully. 
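[editor's note] The network_info blob cached above is a list of VIF dicts, so the fields consumed later (port id, MAC, fixed IPs, MTU) fall out of plain dict traversal. A small sketch over a trimmed copy of the entry for port e8395fdf-5e7f-47d5-9385-6cb5a2090486:

    # Trimmed copy of the VIF entry cached above; other keys omitted.
    network_info = [{
        "id": "e8395fdf-5e7f-47d5-9385-6cb5a2090486",
        "address": "fa:16:3e:15:2a:70",
        "network": {
            "label": "shared",
            "subnets": [{
                "cidr": "192.168.233.0/24",
                "ips": [{"address": "192.168.233.137", "type": "fixed",
                         "floating_ips": []}],
            }],
            "meta": {"mtu": 8950},
        },
        "type": "ovs",
        "devname": "tape8395fdf-5e",
    }]

    for vif in network_info:
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"]]
        print(vif["id"], vif["address"], fixed_ips,
              vif["network"]["meta"]["mtu"])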
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.782412] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Created Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1017.784388] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759e3ad6-35c6-4416-abfc-f416cff4d401 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.842998] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897058, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.51242} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.843608] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 714fafbf-a765-4e2c-8633-997d8244483c/714fafbf-a765-4e2c-8633-997d8244483c.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1017.843608] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1017.843745] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-45570d88-c3cd-4d95-a6ea-f145ad9a1f6a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.852995] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1017.852995] env[69992]: value = "task-2897063" [ 1017.852995] env[69992]: _type = "Task" [ 1017.852995] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.866174] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897063, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.993616] env[69992]: DEBUG oslo_vmware.api [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897060, 'name': PowerOnVM_Task, 'duration_secs': 0.744742} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.993898] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1017.994118] env[69992]: INFO nova.compute.manager [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Took 9.79 seconds to spawn the instance on the hypervisor. [ 1017.994300] env[69992]: DEBUG nova.compute.manager [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1017.995149] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a178bb2f-35f9-4cfd-86f2-b9b9a4129c26 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.028500] env[69992]: DEBUG oslo_vmware.api [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897062, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186785} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.028500] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1018.028500] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1018.028500] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1018.028500] env[69992]: INFO nova.compute.manager [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1018.028669] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1018.028669] env[69992]: DEBUG nova.compute.manager [-] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1018.028669] env[69992]: DEBUG nova.network.neutron [-] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1018.088981] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524f329a-eda8-ba38-3784-b391a3921987, 'name': SearchDatastore_Task, 'duration_secs': 0.009899} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.089682] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b35bc740-2966-4354-83a1-89014cea50a2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.094782] env[69992]: DEBUG nova.network.neutron [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Successfully created port: bd75002a-c4e8-4f29-99ff-b6f5055c068d {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1018.102343] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for the task: (returnval){ [ 1018.102343] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52007023-8389-b647-2fc7-0ec4369bc99d" [ 1018.102343] env[69992]: _type = "Task" [ 1018.102343] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.113553] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52007023-8389-b647-2fc7-0ec4369bc99d, 'name': SearchDatastore_Task, 'duration_secs': 0.011135} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.113821] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1018.114175] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] b7a1b9e1-4d57-435f-bdb6-51481968aacb/b7a1b9e1-4d57-435f-bdb6-51481968aacb.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1018.114408] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3f345e8-f37e-4dc4-9cd2-1c64e3875a40 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.124857] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for the task: (returnval){ [ 1018.124857] env[69992]: value = "task-2897064" [ 1018.124857] env[69992]: _type = "Task" [ 1018.124857] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.137148] env[69992]: DEBUG nova.compute.manager [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1018.143912] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897064, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.207179] env[69992]: DEBUG oslo_concurrency.lockutils [req-443e7c8f-60a3-4cd3-9a3f-a9e5fa7ed46e req-f46c7dfd-286e-419f-b171-22f0432374d3 service nova] Releasing lock "refresh_cache-b7a1b9e1-4d57-435f-bdb6-51481968aacb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1018.305160] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Creating linked-clone VM from snapshot {{(pid=69992) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1018.305559] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-58d528a0-01ef-4744-9829-17e1e2f12ff1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.321124] env[69992]: DEBUG oslo_vmware.api [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1018.321124] env[69992]: value = "task-2897065" [ 1018.321124] env[69992]: _type = "Task" [ 1018.321124] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.332209] env[69992]: DEBUG oslo_vmware.api [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897065, 'name': CloneVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.365414] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897063, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063649} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.366170] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1018.366728] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d156ad79-afdc-49d2-907d-278a0d88371a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.371150] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0116c0f-85bf-4f35-bf1f-6ee1762e9530 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.400605] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] 714fafbf-a765-4e2c-8633-997d8244483c/714fafbf-a765-4e2c-8633-997d8244483c.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1018.406779] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf7cb234-94fc-4b29-b5ee-ecee71cd3ce2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.425541] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d9edbf8-6d1b-46c8-bc2f-ffd3488a48b0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.432522] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f0f462-e8ce-4b5a-b6c1-9728bd2f06ad {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.441673] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1018.441673] env[69992]: value = "task-2897066" [ 1018.441673] env[69992]: _type = "Task" [ 1018.441673] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.481542] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-053d5767-9531-40f9-aac6-d23af64da2d0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.484895] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0ddb29f1-2e24-4db6-88a7-be5f4e7b1f8e tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Suspending the VM {{(pid=69992) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1018.485775] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-9819273d-3c60-4b42-9fe6-467be82e58e8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.501701] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa54403-ea17-4f1e-8567-a2104b392aa8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.508030] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897066, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.508030] env[69992]: DEBUG oslo_vmware.api [None req-0ddb29f1-2e24-4db6-88a7-be5f4e7b1f8e tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1018.508030] env[69992]: value = "task-2897067" [ 1018.508030] env[69992]: _type = "Task" [ 1018.508030] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.525420] env[69992]: DEBUG nova.compute.provider_tree [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1018.541737] env[69992]: DEBUG oslo_vmware.api [None req-0ddb29f1-2e24-4db6-88a7-be5f4e7b1f8e tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897067, 'name': SuspendVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.541737] env[69992]: INFO nova.compute.manager [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Took 54.86 seconds to build instance. [ 1018.634971] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897064, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485507} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.635239] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] b7a1b9e1-4d57-435f-bdb6-51481968aacb/b7a1b9e1-4d57-435f-bdb6-51481968aacb.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1018.635460] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1018.635715] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bae54d76-0342-4e62-90b2-344067e0c9c7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.642833] env[69992]: INFO nova.virt.block_device [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Booting with volume fbb68063-47ce-447c-a9bc-94fbbe5c17f4 at /dev/sda [ 1018.645679] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for the task: (returnval){ [ 1018.645679] env[69992]: value = "task-2897068" [ 1018.645679] env[69992]: _type = "Task" [ 1018.645679] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.658287] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897068, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.713470] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-43ed922b-3adb-4b84-8d8c-6b5f5b608036 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.722102] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-998de3e4-d45e-4003-bced-780b89712b3f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.764220] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c8489715-4af1-47b9-9868-7b1ce41cce8b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.773371] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e61cd887-a6ac-412c-9c43-49324989e52c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.809547] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e15eb2-5fab-404f-9dd7-b62011fb7851 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.816339] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1f0f983-ab48-4f0f-b741-b2e792a22e26 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.829722] env[69992]: DEBUG nova.virt.block_device [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Updating existing volume attachment record: 15db2574-b0a8-4378-81e1-d85e315a4198 {{(pid=69992) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1018.836325] env[69992]: DEBUG oslo_vmware.api [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897065, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.986678] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897066, 'name': ReconfigVM_Task, 'duration_secs': 0.436562} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.986893] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Reconfigured VM instance instance-0000002c to attach disk [datastore1] 714fafbf-a765-4e2c-8633-997d8244483c/714fafbf-a765-4e2c-8633-997d8244483c.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1018.987445] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-525a7324-7f39-4cfb-aab3-d8e0da6c972d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.999027] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1018.999027] env[69992]: value = "task-2897069" [ 1018.999027] env[69992]: _type = "Task" [ 1018.999027] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.006325] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897069, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.016065] env[69992]: DEBUG oslo_vmware.api [None req-0ddb29f1-2e24-4db6-88a7-be5f4e7b1f8e tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897067, 'name': SuspendVM_Task} progress is 66%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.033101] env[69992]: DEBUG nova.scheduler.client.report [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1019.046655] env[69992]: DEBUG oslo_concurrency.lockutils [None req-551b4963-345c-4e33-9aef-a74d64c230f5 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "2b1a0943-d59a-441d-a2e6-8149106803b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.565s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.156277] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897068, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075597} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.156556] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1019.157434] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b88456-a61c-46e2-aa39-165dfa87fd45 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.181561] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] b7a1b9e1-4d57-435f-bdb6-51481968aacb/b7a1b9e1-4d57-435f-bdb6-51481968aacb.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1019.183022] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8edca96c-6e55-41f8-b388-98cb31e25fb2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.200190] env[69992]: DEBUG nova.compute.manager [req-3befc76d-6872-4cc9-a29c-1f03fd84d931 req-78660677-ccf0-4932-9f36-26c99aad1cfe service nova] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Received event network-vif-deleted-9d7cbf3a-bf72-47c9-a580-0464b06e8c89 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 
1019.200486] env[69992]: INFO nova.compute.manager [req-3befc76d-6872-4cc9-a29c-1f03fd84d931 req-78660677-ccf0-4932-9f36-26c99aad1cfe service nova] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Neutron deleted interface 9d7cbf3a-bf72-47c9-a580-0464b06e8c89; detaching it from the instance and deleting it from the info cache [ 1019.200718] env[69992]: DEBUG nova.network.neutron [req-3befc76d-6872-4cc9-a29c-1f03fd84d931 req-78660677-ccf0-4932-9f36-26c99aad1cfe service nova] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.202225] env[69992]: DEBUG nova.network.neutron [-] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.213331] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for the task: (returnval){ [ 1019.213331] env[69992]: value = "task-2897070" [ 1019.213331] env[69992]: _type = "Task" [ 1019.213331] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.222759] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897070, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.331839] env[69992]: DEBUG oslo_vmware.api [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897065, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.506138] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897069, 'name': Rename_Task, 'duration_secs': 0.165861} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.506414] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1019.506758] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-14d24f61-7162-460f-9bf2-c8a6c2f106a8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.518654] env[69992]: DEBUG oslo_vmware.api [None req-0ddb29f1-2e24-4db6-88a7-be5f4e7b1f8e tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897067, 'name': SuspendVM_Task} progress is 100%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.520185] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1019.520185] env[69992]: value = "task-2897071" [ 1019.520185] env[69992]: _type = "Task" [ 1019.520185] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.530024] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897071, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.537842] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.422s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.540575] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.204s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.540740] env[69992]: DEBUG nova.objects.instance [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lazy-loading 'resources' on Instance uuid a29534bf-ee12-4b94-839b-4a12659ebd3b {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1019.548510] env[69992]: DEBUG nova.compute.manager [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1019.570918] env[69992]: INFO nova.scheduler.client.report [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Deleted allocations for instance 97cb6372-3f4e-427d-9509-7e6c43aa2e7b [ 1019.707960] env[69992]: INFO nova.compute.manager [-] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Took 1.68 seconds to deallocate network for instance. 
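The repeated "Task: {'id': task-..., 'name': ...} progress is N%" entries above are emitted by oslo.vmware's task poller (wait_for_task / _poll_task in oslo_vmware/api.py), which keeps re-reading the vCenter task state until it reaches a terminal result; the real code drives a similar loop via oslo.service looping calls. A minimal, self-contained sketch of that polling pattern follows. It is illustrative only: fetch_task_info is a hypothetical stand-in for the property read oslo.vmware performs, not part of its API.

    # Minimal sketch of the polling loop behind the "progress is N%" lines.
    # fetch_task_info(task_id) is a hypothetical callable that returns a dict
    # with 'state' ('running', 'success' or 'error') and 'progress' (0-100).
    import time

    def wait_for_task(fetch_task_info, task_id, poll_interval=0.5, log=print):
        while True:
            info = fetch_task_info(task_id)
            if info['state'] == 'success':
                log("Task %s completed successfully" % task_id)
                return info
            if info['state'] == 'error':
                raise RuntimeError("Task %s failed: %s" % (task_id, info.get('error')))
            log("Task %s progress is %d%%" % (task_id, info.get('progress', 0)))
            time.sleep(poll_interval)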
[ 1019.707960] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4343888e-da7e-4530-9219-bbae79c67266 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.724555] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7952a25b-dcae-41cd-9ce4-652641d53e71 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.741794] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897070, 'name': ReconfigVM_Task, 'duration_secs': 0.389226} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.743017] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Reconfigured VM instance instance-0000002d to attach disk [datastore1] b7a1b9e1-4d57-435f-bdb6-51481968aacb/b7a1b9e1-4d57-435f-bdb6-51481968aacb.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1019.743756] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-be485b8b-a726-4cb7-9c0f-d92c51df6411 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.752025] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for the task: (returnval){ [ 1019.752025] env[69992]: value = "task-2897072" [ 1019.752025] env[69992]: _type = "Task" [ 1019.752025] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.779619] env[69992]: DEBUG nova.compute.manager [req-3befc76d-6872-4cc9-a29c-1f03fd84d931 req-78660677-ccf0-4932-9f36-26c99aad1cfe service nova] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Detach interface failed, port_id=9d7cbf3a-bf72-47c9-a580-0464b06e8c89, reason: Instance 62936d27-5405-4d29-b3ff-c4d8a74ba440 could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1019.785746] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897072, 'name': Rename_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.832311] env[69992]: DEBUG oslo_vmware.api [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897065, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.021321] env[69992]: DEBUG oslo_vmware.api [None req-0ddb29f1-2e24-4db6-88a7-be5f4e7b1f8e tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897067, 'name': SuspendVM_Task, 'duration_secs': 1.10868} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.026285] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0ddb29f1-2e24-4db6-88a7-be5f4e7b1f8e tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Suspended the VM {{(pid=69992) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1020.026621] env[69992]: DEBUG nova.compute.manager [None req-0ddb29f1-2e24-4db6-88a7-be5f4e7b1f8e tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1020.028331] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-091b2ffa-a5af-4acb-a086-a95614b7693f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.038248] env[69992]: DEBUG oslo_vmware.api [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897071, 'name': PowerOnVM_Task, 'duration_secs': 0.520026} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.040517] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1020.041206] env[69992]: INFO nova.compute.manager [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Took 9.03 seconds to spawn the instance on the hypervisor. 
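The entries above trace a complete spawn for instance 714fafbf-...: the cached image disk is copied, the root disk is extended (ExtendVirtualDisk_Task), the VM is reconfigured to attach it (ReconfigVM_Task), renamed (Rename_Task) and powered on (PowerOnVM_Task), after which the manager reports "Took 9.03 seconds to spawn the instance on the hypervisor". The sketch below only outlines that ordering under stated assumptions; run_task is a hypothetical helper standing in for the oslo.vmware session plumbing visible in the log, not Nova's actual code.

    # Ordered outline of the vSphere tasks the log shows for a spawn from a
    # cached image. run_task(name, **kwargs) is a hypothetical helper that
    # issues the named vCenter task and blocks until it finishes.
    def spawn_from_cached_image(run_task, datastore, image_id, instance_uuid, root_gb):
        base = "[%s] devstack-image-cache_base/%s/%s.vmdk" % (datastore, image_id, image_id)
        root = "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)
        run_task("CopyVirtualDisk_Task", source=base, dest=root)              # copy cached image to instance dir
        run_task("ExtendVirtualDisk_Task", disk=root,
                 new_size_kb=root_gb * 1024 * 1024)                           # e.g. 1 GB root -> 1048576 KB, as logged
        run_task("ReconfigVM_Task", vm=instance_uuid, attach_disk=root)       # attach the root disk to the VM
        run_task("Rename_Task", vm=instance_uuid, new_name=instance_uuid)
        run_task("PowerOnVM_Task", vm=instance_uuid)                          # boot the guest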
[ 1020.041458] env[69992]: DEBUG nova.compute.manager [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1020.048100] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd57965-a16d-4d38-9a74-60f9108a59a8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.081113] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7ef51d92-04b2-4bf7-b490-4a385720d045 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lock "97cb6372-3f4e-427d-9509-7e6c43aa2e7b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.232s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.081113] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.195905] env[69992]: DEBUG nova.network.neutron [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Successfully updated port: bd75002a-c4e8-4f29-99ff-b6f5055c068d {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1020.222222] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.261793] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897072, 'name': Rename_Task, 'duration_secs': 0.153114} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.262097] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1020.262358] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84fed120-1084-4ba6-a04d-7d3dd968173c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.269950] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for the task: (returnval){ [ 1020.269950] env[69992]: value = "task-2897073" [ 1020.269950] env[69992]: _type = "Task" [ 1020.269950] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.279902] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897073, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.337472] env[69992]: DEBUG oslo_vmware.api [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897065, 'name': CloneVM_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.581667] env[69992]: INFO nova.compute.manager [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Took 50.48 seconds to build instance. 
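The oslo.concurrency entries above record, for each named lock, how long the caller waited to acquire it and how long it was held (for example 'waited 0.001s', 'held 85.412s'). A self-contained approximation of that instrumentation is sketched below using a plain threading lock and monotonic timing; the real lockutils wrapper has additional features (external file locks, semaphores), so this is illustrative only.

    # Illustrative timing wrapper that reproduces the "acquired :: waited Xs"
    # and "released :: held Ys" style of the lockutils lines above. Not
    # oslo.concurrency's implementation, just the same idea.
    import threading
    import time
    from contextlib import contextmanager

    _locks = {}  # name -> threading.Lock (sketch; not itself guarded)

    @contextmanager
    def timed_lock(name, log=print):
        lock = _locks.setdefault(name, threading.Lock())
        t_request = time.monotonic()
        lock.acquire()
        t_acquired = time.monotonic()
        log('Lock "%s" acquired :: waited %.3fs' % (name, t_acquired - t_request))
        try:
            yield
        finally:
            lock.release()
            log('Lock "%s" released :: held %.3fs' % (name, time.monotonic() - t_acquired))

Usage would look like: with timed_lock("refresh_cache-<instance uuid>"): refresh_the_cache(), producing log lines of the same shape as those above.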
[ 1020.701944] env[69992]: DEBUG oslo_concurrency.lockutils [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Acquiring lock "refresh_cache-af07ebd0-5f12-49c3-a518-95be9a8d6c82" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.702330] env[69992]: DEBUG oslo_concurrency.lockutils [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Acquired lock "refresh_cache-af07ebd0-5f12-49c3-a518-95be9a8d6c82" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1020.703412] env[69992]: DEBUG nova.network.neutron [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1020.737923] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fac8b36-29c7-4933-8091-f803013826d8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.746548] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2914c8c9-1b39-4a45-add2-f9374ffd9cfc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.788256] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81fdd56-545e-4960-b23a-e4c1e70c645a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.796806] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897073, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.800071] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a46cf23d-acc1-426e-be6e-a0d5e87b7d89 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.815625] env[69992]: DEBUG nova.compute.provider_tree [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1020.835695] env[69992]: DEBUG oslo_vmware.api [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897065, 'name': CloneVM_Task, 'duration_secs': 2.129079} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.836038] env[69992]: INFO nova.virt.vmwareapi.vmops [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Created linked-clone VM from snapshot [ 1020.836896] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9dd61b1-bfb8-4440-9bd1-71a2ef34003f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.846356] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Uploading image 6534e8d5-5df3-44be-a620-28db421259d9 {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1020.868625] env[69992]: DEBUG oslo_concurrency.lockutils [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquiring lock "27580836-7ab5-4e64-a985-3e6fc22a8b77" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.868625] env[69992]: DEBUG oslo_concurrency.lockutils [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lock "27580836-7ab5-4e64-a985-3e6fc22a8b77" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.868783] env[69992]: DEBUG oslo_concurrency.lockutils [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquiring lock "27580836-7ab5-4e64-a985-3e6fc22a8b77-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.869050] env[69992]: DEBUG oslo_concurrency.lockutils [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lock "27580836-7ab5-4e64-a985-3e6fc22a8b77-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.869119] env[69992]: DEBUG oslo_concurrency.lockutils [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lock "27580836-7ab5-4e64-a985-3e6fc22a8b77-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.871650] env[69992]: INFO nova.compute.manager [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 
27580836-7ab5-4e64-a985-3e6fc22a8b77] Terminating instance [ 1020.885476] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1020.885476] env[69992]: value = "vm-581957" [ 1020.885476] env[69992]: _type = "VirtualMachine" [ 1020.885476] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1020.886351] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9cfde2df-15bd-4dd6-ac7b-5de3ea53dec2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.893588] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lease: (returnval){ [ 1020.893588] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]526b117f-c337-b22c-9574-41c8e79a47f8" [ 1020.893588] env[69992]: _type = "HttpNfcLease" [ 1020.893588] env[69992]: } obtained for exporting VM: (result){ [ 1020.893588] env[69992]: value = "vm-581957" [ 1020.893588] env[69992]: _type = "VirtualMachine" [ 1020.893588] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1020.893911] env[69992]: DEBUG oslo_vmware.api [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the lease: (returnval){ [ 1020.893911] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]526b117f-c337-b22c-9574-41c8e79a47f8" [ 1020.893911] env[69992]: _type = "HttpNfcLease" [ 1020.893911] env[69992]: } to be ready. {{(pid=69992) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1020.901332] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1020.901332] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]526b117f-c337-b22c-9574-41c8e79a47f8" [ 1020.901332] env[69992]: _type = "HttpNfcLease" [ 1020.901332] env[69992]: } is initializing. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1020.952028] env[69992]: DEBUG nova.compute.manager [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1020.952893] env[69992]: DEBUG nova.virt.hardware [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1020.952893] env[69992]: DEBUG nova.virt.hardware [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1020.953078] env[69992]: DEBUG nova.virt.hardware [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1020.953276] env[69992]: DEBUG nova.virt.hardware [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1020.953423] env[69992]: DEBUG nova.virt.hardware [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1020.953781] env[69992]: DEBUG nova.virt.hardware [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1020.953850] env[69992]: DEBUG nova.virt.hardware [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1020.954053] env[69992]: DEBUG nova.virt.hardware [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1020.954162] env[69992]: DEBUG nova.virt.hardware [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Got 1 
possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1020.954333] env[69992]: DEBUG nova.virt.hardware [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1020.954744] env[69992]: DEBUG nova.virt.hardware [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1020.955549] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e6ca968-1bd8-4d34-a678-ec8b1a15a106 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.965989] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3de710b-5535-41ed-8757-54acb022ead9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.085167] env[69992]: DEBUG oslo_concurrency.lockutils [None req-22cfc8c0-cdf0-4a9d-b510-f62be18765c3 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "714fafbf-a765-4e2c-8633-997d8244483c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.412s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.247424] env[69992]: DEBUG nova.network.neutron [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1021.294405] env[69992]: DEBUG oslo_vmware.api [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897073, 'name': PowerOnVM_Task, 'duration_secs': 0.875782} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.296923] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1021.297647] env[69992]: INFO nova.compute.manager [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Took 7.38 seconds to spawn the instance on the hypervisor. 
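The nova.virt.hardware entries above show the CPU topology negotiation for the m1.nano flavor: with 1 vCPU and no flavor or image limits, the only factorization within the default maxima is sockets=1, cores=1, threads=1, which is why the log reports exactly one possible (and preferred) topology. The sketch below mirrors that enumeration idea; it is a simplified stand-in, not Nova's _get_possible_cpu_topologies.

    # Rough sketch: every (sockets, cores, threads) triple whose product equals
    # the vCPU count and which stays within the given maxima. For vcpus=1 the
    # only result is (1, 1, 1), matching the log above.
    def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topologies.append((sockets, cores, threads))
        return topologies

    print(possible_cpu_topologies(1))   # -> [(1, 1, 1)]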
[ 1021.297647] env[69992]: DEBUG nova.compute.manager [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1021.298184] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99c19ee-27a9-48cd-abcc-a78fb3ba870f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.320413] env[69992]: DEBUG nova.scheduler.client.report [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1021.378253] env[69992]: DEBUG nova.compute.manager [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1021.378557] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1021.379417] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac1181d-8c9a-4800-9e4a-f788c8547230 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.387513] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1021.387769] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7c1053d1-e201-4859-8a69-a72029ef9d06 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.397027] env[69992]: DEBUG oslo_vmware.api [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 1021.397027] env[69992]: value = "task-2897075" [ 1021.397027] env[69992]: _type = "Task" [ 1021.397027] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.405662] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1021.405662] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]526b117f-c337-b22c-9574-41c8e79a47f8" [ 1021.405662] env[69992]: _type = "HttpNfcLease" [ 1021.405662] env[69992]: } is ready. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1021.410594] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1021.410594] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]526b117f-c337-b22c-9574-41c8e79a47f8" [ 1021.410594] env[69992]: _type = "HttpNfcLease" [ 1021.410594] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1021.410842] env[69992]: DEBUG oslo_vmware.api [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2897075, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.411473] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c8eb55-77bd-4fd7-ab67-64c82d396a31 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.416535] env[69992]: DEBUG nova.network.neutron [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Updating instance_info_cache with network_info: [{"id": "bd75002a-c4e8-4f29-99ff-b6f5055c068d", "address": "fa:16:3e:74:86:2a", "network": {"id": "19dbabdd-51f5-46b4-aacb-e8a35d25a612", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-686158214-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b2281c7442a4ab798e2581de5f8cdce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e2ec358-9bc5-4dd6-8f4e-0d6ec225282a", "external-id": "nsx-vlan-transportzone-843", "segmentation_id": 843, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd75002a-c4", "ovs_interfaceid": "bd75002a-c4e8-4f29-99ff-b6f5055c068d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.422988] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Found VMDK URL: 
https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52853ae9-d355-3c98-2f7c-dbaf323aba72/disk-0.vmdk from lease info. {{(pid=69992) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1021.423243] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52853ae9-d355-3c98-2f7c-dbaf323aba72/disk-0.vmdk for reading. {{(pid=69992) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1021.549336] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c043630f-2b7c-4802-97dd-14fce7902db6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.588638] env[69992]: DEBUG nova.compute.manager [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1021.818724] env[69992]: INFO nova.compute.manager [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Took 49.48 seconds to build instance. [ 1021.825778] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.285s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.829848] env[69992]: DEBUG oslo_concurrency.lockutils [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.434s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1021.829848] env[69992]: DEBUG nova.objects.instance [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Lazy-loading 'resources' on Instance uuid a9274dfc-afbd-419b-a98b-053d71a05d7c {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1021.855288] env[69992]: INFO nova.scheduler.client.report [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Deleted allocations for instance a29534bf-ee12-4b94-839b-4a12659ebd3b [ 1021.865438] env[69992]: DEBUG nova.compute.manager [req-017a3991-833e-4bff-a8b3-16838c8d8598 req-73c8bcf7-0c0d-445d-9f9c-d3081b8773a8 service nova] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Received event network-vif-plugged-bd75002a-c4e8-4f29-99ff-b6f5055c068d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1021.865438] env[69992]: DEBUG oslo_concurrency.lockutils 
[req-017a3991-833e-4bff-a8b3-16838c8d8598 req-73c8bcf7-0c0d-445d-9f9c-d3081b8773a8 service nova] Acquiring lock "af07ebd0-5f12-49c3-a518-95be9a8d6c82-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1021.865438] env[69992]: DEBUG oslo_concurrency.lockutils [req-017a3991-833e-4bff-a8b3-16838c8d8598 req-73c8bcf7-0c0d-445d-9f9c-d3081b8773a8 service nova] Lock "af07ebd0-5f12-49c3-a518-95be9a8d6c82-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1021.865438] env[69992]: DEBUG oslo_concurrency.lockutils [req-017a3991-833e-4bff-a8b3-16838c8d8598 req-73c8bcf7-0c0d-445d-9f9c-d3081b8773a8 service nova] Lock "af07ebd0-5f12-49c3-a518-95be9a8d6c82-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.865438] env[69992]: DEBUG nova.compute.manager [req-017a3991-833e-4bff-a8b3-16838c8d8598 req-73c8bcf7-0c0d-445d-9f9c-d3081b8773a8 service nova] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] No waiting events found dispatching network-vif-plugged-bd75002a-c4e8-4f29-99ff-b6f5055c068d {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1021.865819] env[69992]: WARNING nova.compute.manager [req-017a3991-833e-4bff-a8b3-16838c8d8598 req-73c8bcf7-0c0d-445d-9f9c-d3081b8773a8 service nova] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Received unexpected event network-vif-plugged-bd75002a-c4e8-4f29-99ff-b6f5055c068d for instance with vm_state building and task_state spawning. [ 1021.866078] env[69992]: DEBUG nova.compute.manager [req-017a3991-833e-4bff-a8b3-16838c8d8598 req-73c8bcf7-0c0d-445d-9f9c-d3081b8773a8 service nova] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Received event network-changed-bd75002a-c4e8-4f29-99ff-b6f5055c068d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1021.866540] env[69992]: DEBUG nova.compute.manager [req-017a3991-833e-4bff-a8b3-16838c8d8598 req-73c8bcf7-0c0d-445d-9f9c-d3081b8773a8 service nova] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Refreshing instance network info cache due to event network-changed-bd75002a-c4e8-4f29-99ff-b6f5055c068d. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1021.866820] env[69992]: DEBUG oslo_concurrency.lockutils [req-017a3991-833e-4bff-a8b3-16838c8d8598 req-73c8bcf7-0c0d-445d-9f9c-d3081b8773a8 service nova] Acquiring lock "refresh_cache-af07ebd0-5f12-49c3-a518-95be9a8d6c82" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.912117] env[69992]: DEBUG oslo_vmware.api [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2897075, 'name': PowerOffVM_Task, 'duration_secs': 0.234444} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.912117] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1021.912451] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1021.912828] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3b79d99f-601e-498a-90d3-cf2d2d1a4fd3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.919026] env[69992]: DEBUG oslo_concurrency.lockutils [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Releasing lock "refresh_cache-af07ebd0-5f12-49c3-a518-95be9a8d6c82" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1021.919405] env[69992]: DEBUG nova.compute.manager [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Instance network_info: |[{"id": "bd75002a-c4e8-4f29-99ff-b6f5055c068d", "address": "fa:16:3e:74:86:2a", "network": {"id": "19dbabdd-51f5-46b4-aacb-e8a35d25a612", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-686158214-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b2281c7442a4ab798e2581de5f8cdce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e2ec358-9bc5-4dd6-8f4e-0d6ec225282a", "external-id": "nsx-vlan-transportzone-843", "segmentation_id": 843, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd75002a-c4", "ovs_interfaceid": "bd75002a-c4e8-4f29-99ff-b6f5055c068d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1021.919695] env[69992]: DEBUG oslo_concurrency.lockutils [req-017a3991-833e-4bff-a8b3-16838c8d8598 req-73c8bcf7-0c0d-445d-9f9c-d3081b8773a8 service nova] Acquired lock "refresh_cache-af07ebd0-5f12-49c3-a518-95be9a8d6c82" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1021.920054] env[69992]: DEBUG nova.network.neutron [req-017a3991-833e-4bff-a8b3-16838c8d8598 req-73c8bcf7-0c0d-445d-9f9c-d3081b8773a8 service nova] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Refreshing network info 
cache for port bd75002a-c4e8-4f29-99ff-b6f5055c068d {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1021.921407] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:86:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4e2ec358-9bc5-4dd6-8f4e-0d6ec225282a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd75002a-c4e8-4f29-99ff-b6f5055c068d', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1021.932063] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Creating folder: Project (3b2281c7442a4ab798e2581de5f8cdce). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1021.933625] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6c1b211-008f-43db-9f24-5500c32485aa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.949116] env[69992]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1021.949179] env[69992]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=69992) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1021.949725] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Folder already exists: Project (3b2281c7442a4ab798e2581de5f8cdce). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1021.949867] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Creating folder: Instances. Parent ref: group-v581895. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1021.950309] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ddb0dc73-2d38-4c92-8e20-11c2d606e2ab {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.961107] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Created folder: Instances in parent group-v581895. [ 1021.961579] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1021.961987] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1021.962308] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-394f27cd-0f06-4ac7-869c-2a265acf8123 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.982624] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1021.983594] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1021.983594] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Deleting the datastore file [datastore1] 27580836-7ab5-4e64-a985-3e6fc22a8b77 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1021.983764] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba89d0ed-a2aa-4a0b-bd13-238f9123a261 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.993611] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1021.993611] env[69992]: value = "task-2897079" [ 1021.993611] env[69992]: _type = "Task" [ 1021.993611] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.999398] env[69992]: DEBUG oslo_vmware.api [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for the task: (returnval){ [ 1021.999398] env[69992]: value = "task-2897080" [ 1021.999398] env[69992]: _type = "Task" [ 1021.999398] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.008168] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897079, 'name': CreateVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.014412] env[69992]: DEBUG oslo_vmware.api [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2897080, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.116155] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1022.322616] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81849a26-a7cd-4790-84a9-ed78fe81f4b5 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Lock "b7a1b9e1-4d57-435f-bdb6-51481968aacb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.818s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.363074] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d8dda7b1-4c53-4e71-9e7e-d3afa6b0fe7b tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lock "a29534bf-ee12-4b94-839b-4a12659ebd3b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.463s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.469455] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "131096fc-addf-4d9a-9cd7-4abe98aabd1f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1022.470995] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "131096fc-addf-4d9a-9cd7-4abe98aabd1f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.494735] env[69992]: DEBUG nova.compute.manager [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1022.495731] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad4e72d-84c5-4f20-a965-08d7755ee39d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.512346] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "086ac14d-74bb-4bb6-90b3-3e345b2894a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1022.512930] env[69992]: DEBUG oslo_concurrency.lockutils [None 
req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "086ac14d-74bb-4bb6-90b3-3e345b2894a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.525200] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897079, 'name': CreateVM_Task, 'duration_secs': 0.474925} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.528462] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1022.533969] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'boot_index': 0, 'disk_bus': None, 'device_type': None, 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581910', 'volume_id': 'fbb68063-47ce-447c-a9bc-94fbbe5c17f4', 'name': 'volume-fbb68063-47ce-447c-a9bc-94fbbe5c17f4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'af07ebd0-5f12-49c3-a518-95be9a8d6c82', 'attached_at': '', 'detached_at': '', 'volume_id': 'fbb68063-47ce-447c-a9bc-94fbbe5c17f4', 'serial': 'fbb68063-47ce-447c-a9bc-94fbbe5c17f4'}, 'attachment_id': '15db2574-b0a8-4378-81e1-d85e315a4198', 'delete_on_termination': True, 'mount_device': '/dev/sda', 'volume_type': None}], 'swap': None} {{(pid=69992) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1022.534140] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Root volume attach. Driver type: vmdk {{(pid=69992) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1022.534510] env[69992]: DEBUG oslo_vmware.api [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Task: {'id': task-2897080, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.332616} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.538608] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b1a858e-7635-436b-90e4-7dafcfa375d1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.538744] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1022.539167] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1022.539167] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1022.539435] env[69992]: INFO nova.compute.manager [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1022.539688] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1022.540181] env[69992]: DEBUG nova.compute.manager [-] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1022.540358] env[69992]: DEBUG nova.network.neutron [-] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1022.550730] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a614215-0b58-4e4c-a5e2-63abd19b05b0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.563327] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370a504a-dcee-44c3-9fe7-b1f74b8fb587 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.570508] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-79b59f45-313d-4f2b-8aea-4f7f22a4efed {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.581779] env[69992]: DEBUG oslo_vmware.api [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Waiting for the task: (returnval){ [ 1022.581779] env[69992]: value = "task-2897081" [ 1022.581779] env[69992]: _type = "Task" [ 1022.581779] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.591520] env[69992]: DEBUG oslo_vmware.api [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Task: {'id': task-2897081, 'name': RelocateVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.817067] env[69992]: DEBUG nova.network.neutron [req-017a3991-833e-4bff-a8b3-16838c8d8598 req-73c8bcf7-0c0d-445d-9f9c-d3081b8773a8 service nova] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Updated VIF entry in instance network info cache for port bd75002a-c4e8-4f29-99ff-b6f5055c068d. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1022.817564] env[69992]: DEBUG nova.network.neutron [req-017a3991-833e-4bff-a8b3-16838c8d8598 req-73c8bcf7-0c0d-445d-9f9c-d3081b8773a8 service nova] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Updating instance_info_cache with network_info: [{"id": "bd75002a-c4e8-4f29-99ff-b6f5055c068d", "address": "fa:16:3e:74:86:2a", "network": {"id": "19dbabdd-51f5-46b4-aacb-e8a35d25a612", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-686158214-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b2281c7442a4ab798e2581de5f8cdce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e2ec358-9bc5-4dd6-8f4e-0d6ec225282a", "external-id": "nsx-vlan-transportzone-843", "segmentation_id": 843, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd75002a-c4", "ovs_interfaceid": "bd75002a-c4e8-4f29-99ff-b6f5055c068d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.825891] env[69992]: DEBUG nova.compute.manager [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1022.846834] env[69992]: DEBUG oslo_vmware.rw_handles [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a8e638-8952-12e9-2ba6-0652b990e531/disk-0.vmdk. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1022.848487] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-098e6d52-d3ab-4d53-9843-fefc6f3dcb13 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.859276] env[69992]: DEBUG oslo_vmware.rw_handles [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a8e638-8952-12e9-2ba6-0652b990e531/disk-0.vmdk is in state: ready. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1022.859508] env[69992]: ERROR oslo_vmware.rw_handles [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a8e638-8952-12e9-2ba6-0652b990e531/disk-0.vmdk due to incomplete transfer. 
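The records above trace the image-export path end to end: an HttpNfcLease is created for the source VM, polled until it reports "ready", the disk-0.vmdk URL is pulled out of the lease info and opened for reading, HttpNfcLeaseProgress keepalives are sent while the data streams, and the lease is finally released, or aborted when the transfer is incomplete (the ERROR record above). The real logic lives in oslo_vmware/rw_handles.py and nova/virt/vmwareapi/images.py; what follows is only a minimal, self-contained sketch of that pattern, and every name in it (FakeLease, read_exported_vmdk, the poll/report/abort methods) is an illustrative stand-in, not the oslo.vmware API.

# Minimal sketch of the HttpNfcLease export/read pattern seen above.
# All classes and helpers here are illustrative stand-ins, not the
# oslo.vmware API; the real implementation is in oslo_vmware/rw_handles.py.
import io
import time


class FakeLease:
    """Stand-in for an HttpNfcLease object (hypothetical)."""

    def __init__(self, total_bytes):
        self.state = "initializing"
        self.total_bytes = total_bytes
        self.progress = 0
        self.outcome = None

    def poll(self):
        # A real caller would invoke the VIM API; this fake lease simply
        # becomes ready on the first poll.
        self.state = "ready"
        return self.state

    def report_progress(self, percent):
        # Mirrors the HttpNfcLease.HttpNfcLeaseProgress keepalives.
        self.progress = percent

    def complete(self):
        self.outcome = "complete"     # analogous to HttpNfcLeaseComplete

    def abort(self):
        self.outcome = "aborted"      # analogous to HttpNfcLeaseAbort


def read_exported_vmdk(lease, stream, chunk_size=64 * 1024):
    """Stream the exported disk, reporting progress, then close the lease."""
    while lease.poll() != "ready":
        time.sleep(0.5)

    transferred = 0
    while True:
        chunk = stream.read(chunk_size)
        if not chunk:
            break
        transferred += len(chunk)
        lease.report_progress(int(100 * transferred / lease.total_bytes))

    if transferred == lease.total_bytes:
        lease.complete()
    else:
        # Matches the "Aborting lease ... due to incomplete transfer" path.
        lease.abort()
    return transferred


if __name__ == "__main__":
    payload = b"\0" * (256 * 1024)
    lease = FakeLease(total_bytes=len(payload))
    moved = read_exported_vmdk(lease, io.BytesIO(payload))
    print(moved, lease.outcome)   # 262144 complete

A transfer that ends short of total_bytes takes the abort branch, which is exactly the situation the ERROR record above reports before the read handle is closed.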
[ 1022.859967] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e4c18a53-12cc-42c5-b123-d9b77d960349 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.867686] env[69992]: DEBUG oslo_vmware.rw_handles [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a8e638-8952-12e9-2ba6-0652b990e531/disk-0.vmdk. {{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1022.867852] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Uploaded image f4723384-8c26-48b3-817e-be7849f27178 to the Glance image server {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1022.871283] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Destroying the VM {{(pid=69992) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1022.874783] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3a632ed6-c9e4-48a1-b8e4-1a4674ed452d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.883369] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1022.883369] env[69992]: value = "task-2897082" [ 1022.883369] env[69992]: _type = "Task" [ 1022.883369] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.897607] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897082, 'name': Destroy_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.027660] env[69992]: INFO nova.compute.manager [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] instance snapshotting [ 1023.027660] env[69992]: WARNING nova.compute.manager [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 1023.031327] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739f81e4-6af8-4003-a8e3-e86a8ef05917 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.056403] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ef711a-ac75-4a27-9f05-934eb2017ee3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.069984] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e0141b4-09c5-415c-82e6-973f54f9901e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.077842] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ddebaa-b179-4629-8ff2-ce6e856b8aff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.090779] env[69992]: DEBUG oslo_vmware.api [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Task: {'id': task-2897081, 'name': RelocateVM_Task} progress is 20%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.120943] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b63d760-5c3a-47ad-a786-219a34b257e4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.860689] env[69992]: DEBUG oslo_concurrency.lockutils [req-017a3991-833e-4bff-a8b3-16838c8d8598 req-73c8bcf7-0c0d-445d-9f9c-d3081b8773a8 service nova] Releasing lock "refresh_cache-af07ebd0-5f12-49c3-a518-95be9a8d6c82" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1023.863350] env[69992]: DEBUG nova.network.neutron [-] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.864367] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquiring lock "73e41918-88b8-4ff7-9fdd-b45ac97c80ec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.864656] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lock "73e41918-88b8-4ff7-9fdd-b45ac97c80ec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.864916] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquiring lock "73e41918-88b8-4ff7-9fdd-b45ac97c80ec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.865153] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lock "73e41918-88b8-4ff7-9fdd-b45ac97c80ec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.865365] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lock "73e41918-88b8-4ff7-9fdd-b45ac97c80ec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1023.867582] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Creating Snapshot of the VM 
instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1023.877330] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0caea7b5-3406-4d30-bbad-c69d823ebcfb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.882864] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e89ccf0-aa4a-4f43-b3df-c06581a5ce06 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.910145] env[69992]: DEBUG oslo_vmware.api [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Task: {'id': task-2897081, 'name': RelocateVM_Task, 'duration_secs': 0.597166} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.910887] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897082, 'name': Destroy_Task, 'duration_secs': 0.625447} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.911552] env[69992]: DEBUG nova.compute.provider_tree [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.914605] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.916097] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Volume attach. 
Driver type: vmdk {{(pid=69992) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1023.916567] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581910', 'volume_id': 'fbb68063-47ce-447c-a9bc-94fbbe5c17f4', 'name': 'volume-fbb68063-47ce-447c-a9bc-94fbbe5c17f4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'af07ebd0-5f12-49c3-a518-95be9a8d6c82', 'attached_at': '', 'detached_at': '', 'volume_id': 'fbb68063-47ce-447c-a9bc-94fbbe5c17f4', 'serial': 'fbb68063-47ce-447c-a9bc-94fbbe5c17f4'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1023.916905] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Destroyed the VM [ 1023.917184] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Deleting Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1023.918102] env[69992]: DEBUG oslo_vmware.api [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1023.918102] env[69992]: value = "task-2897083" [ 1023.918102] env[69992]: _type = "Task" [ 1023.918102] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.919566] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77d44d1-2bfc-4c5d-bfcb-7f0ca158b511 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.923118] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c3395f3c-2f32-42fc-8dca-6ff4e2182914 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.928903] env[69992]: INFO nova.compute.manager [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Terminating instance [ 1023.957457] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b40525cd-1844-4db5-a0f7-8a83ecdc9551 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.960516] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1023.960516] env[69992]: value = "task-2897084" [ 1023.960516] env[69992]: _type = "Task" [ 1023.960516] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.961204] env[69992]: DEBUG oslo_vmware.api [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897083, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.985645] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] volume-fbb68063-47ce-447c-a9bc-94fbbe5c17f4/volume-fbb68063-47ce-447c-a9bc-94fbbe5c17f4.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1023.987053] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-122d36af-4030-4e23-87f4-b3cf78514416 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.007291] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897084, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.014171] env[69992]: DEBUG nova.compute.manager [req-1b3bf1e7-57c6-4e47-88e8-95d558263908 req-f342089d-23eb-4f61-8114-c793bd3be696 service nova] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Received event network-vif-deleted-b35f3c6b-88f2-436b-994c-ba4ce4ad2662 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1024.016367] env[69992]: DEBUG oslo_vmware.api [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Waiting for the task: (returnval){ [ 1024.016367] env[69992]: value = "task-2897085" [ 1024.016367] env[69992]: _type = "Task" [ 1024.016367] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.028331] env[69992]: DEBUG oslo_vmware.api [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Task: {'id': task-2897085, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.292497] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquiring lock "32bdb15d-6a4d-4445-9b82-d18b0f6743b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.292794] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Lock "32bdb15d-6a4d-4445-9b82-d18b0f6743b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.371038] env[69992]: INFO nova.compute.manager [-] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Took 1.83 seconds to deallocate network for instance. 
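Most of the DEBUG records in this section are oslo_vmware.api output from wait_for_task/_poll_task: the driver receives a Task managed-object reference (PowerOffVM_Task, CreateVM_Task, RelocateVM_Task, ReconfigVM_Task, and so on), polls it on a loop, logs the progress percentage on each poll, and logs duration_secs once the task reaches success. The real loop is in oslo_vmware/api.py; the sketch below only illustrates that polling pattern, and FakeTask, its info() dictionary, and the task id used in the example are stand-ins rather than actual VIM TaskInfo objects.

# Minimal sketch of the wait_for_task polling pattern reflected in the
# "Task: {'id': ..., 'name': ...} progress is N%" records above.
# FakeTask is an illustrative stand-in; the real loop lives in
# oslo_vmware/api.py (wait_for_task / _poll_task).
import time


class FakeTask:
    """Stand-in for a VIM Task whose info is fetched on every poll."""

    def __init__(self, task_id, name, steps=3):
        self.task_id = task_id
        self.name = name
        self._steps = steps
        self._polls = 0

    def info(self):
        # Each poll advances the fake task; a real poll would read
        # TaskInfo.state and TaskInfo.progress from vCenter.
        self._polls += 1
        if self._polls >= self._steps:
            return {"state": "success", "progress": 100}
        return {"state": "running",
                "progress": int(100 * self._polls / self._steps)}


def wait_for_task(task, poll_interval=0.1):
    """Poll until the task succeeds or errors, logging progress as we go."""
    started = time.monotonic()
    while True:
        info = task.info()
        if info["state"] == "success":
            duration = time.monotonic() - started
            print("Task: {'id': %s, 'name': %s, 'duration_secs': %.6f} "
                  "completed successfully."
                  % (task.task_id, task.name, duration))
            return info
        if info["state"] == "error":
            raise RuntimeError("task %s failed" % task.task_id)
        print("Task: {'id': %s, 'name': %s} progress is %d%%."
              % (task.task_id, task.name, info["progress"]))
        time.sleep(poll_interval)


if __name__ == "__main__":
    # Hypothetical task id, chosen for the example only.
    wait_for_task(FakeTask("task-0000000", "ReconfigVM_Task"))

Each attach/destroy sequence in the log is this loop run against a different task object, which is why a single operation such as the root-volume attach above produces a RelocateVM_Task, a ReconfigVM_Task, and a Rename_Task, each with its own progress and duration_secs records.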
[ 1024.387404] env[69992]: INFO nova.compute.manager [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Rebuilding instance [ 1024.420400] env[69992]: DEBUG nova.scheduler.client.report [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1024.435570] env[69992]: DEBUG nova.compute.manager [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1024.435895] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1024.438972] env[69992]: DEBUG nova.compute.manager [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1024.439869] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b97d02-53f4-4831-817c-c9607463e7fa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.446708] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1f6ef8-44cd-47df-bcfb-bb80ccd6f878 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.456123] env[69992]: DEBUG oslo_vmware.api [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897083, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.459610] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1024.462943] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1dd32e8a-7abc-47bd-b330-7fea8713893d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.474923] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897084, 'name': RemoveSnapshot_Task, 'duration_secs': 0.473985} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.475999] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Deleted Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1024.475999] env[69992]: DEBUG nova.compute.manager [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1024.476306] env[69992]: DEBUG oslo_vmware.api [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 1024.476306] env[69992]: value = "task-2897086" [ 1024.476306] env[69992]: _type = "Task" [ 1024.476306] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.478022] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41fb0b0-c307-4a72-bcc1-8bb2cceeb5c6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.492963] env[69992]: DEBUG oslo_vmware.api [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2897086, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.528904] env[69992]: DEBUG oslo_vmware.api [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Task: {'id': task-2897085, 'name': ReconfigVM_Task, 'duration_secs': 0.333049} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.529289] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Reconfigured VM instance instance-0000002e to attach disk [datastore2] volume-fbb68063-47ce-447c-a9bc-94fbbe5c17f4/volume-fbb68063-47ce-447c-a9bc-94fbbe5c17f4.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1024.535034] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bfbc1ac9-0ba7-41ef-9c0f-85e76f501ba9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.552247] env[69992]: DEBUG oslo_vmware.api [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Waiting for the task: (returnval){ [ 1024.552247] env[69992]: value = "task-2897087" [ 1024.552247] env[69992]: _type = "Task" [ 1024.552247] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.563746] env[69992]: DEBUG oslo_vmware.api [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Task: {'id': task-2897087, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.879313] env[69992]: DEBUG oslo_concurrency.lockutils [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.926447] env[69992]: DEBUG oslo_concurrency.lockutils [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.097s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.929051] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.817s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.930761] env[69992]: INFO nova.compute.claims [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1024.946353] env[69992]: DEBUG oslo_vmware.api [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': 
task-2897083, 'name': CreateSnapshot_Task, 'duration_secs': 0.741558} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.946660] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Created Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1024.947670] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d40b4218-26f2-44d7-b80d-f93d6dde60a9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.950757] env[69992]: INFO nova.scheduler.client.report [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Deleted allocations for instance a9274dfc-afbd-419b-a98b-053d71a05d7c [ 1024.990487] env[69992]: DEBUG oslo_vmware.api [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2897086, 'name': PowerOffVM_Task, 'duration_secs': 0.192459} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.990787] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1024.990969] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1024.991253] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dfe29386-e1fb-4a6f-aeda-4a430845a5ff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.996721] env[69992]: INFO nova.compute.manager [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Shelve offloading [ 1025.059471] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1025.060099] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 
1025.060528] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Deleting the datastore file [datastore2] 73e41918-88b8-4ff7-9fdd-b45ac97c80ec {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1025.065127] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-343a1b91-120f-4823-a16e-93f0c0e19dbf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.067431] env[69992]: DEBUG oslo_vmware.api [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Task: {'id': task-2897087, 'name': ReconfigVM_Task, 'duration_secs': 0.143387} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.067753] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581910', 'volume_id': 'fbb68063-47ce-447c-a9bc-94fbbe5c17f4', 'name': 'volume-fbb68063-47ce-447c-a9bc-94fbbe5c17f4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'af07ebd0-5f12-49c3-a518-95be9a8d6c82', 'attached_at': '', 'detached_at': '', 'volume_id': 'fbb68063-47ce-447c-a9bc-94fbbe5c17f4', 'serial': 'fbb68063-47ce-447c-a9bc-94fbbe5c17f4'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1025.068753] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-77180fd1-40f0-475e-b81a-bc43600db3b5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.073254] env[69992]: DEBUG oslo_vmware.api [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for the task: (returnval){ [ 1025.073254] env[69992]: value = "task-2897089" [ 1025.073254] env[69992]: _type = "Task" [ 1025.073254] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.078031] env[69992]: DEBUG oslo_vmware.api [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Waiting for the task: (returnval){ [ 1025.078031] env[69992]: value = "task-2897090" [ 1025.078031] env[69992]: _type = "Task" [ 1025.078031] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.084106] env[69992]: DEBUG oslo_vmware.api [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2897089, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.088931] env[69992]: DEBUG oslo_vmware.api [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Task: {'id': task-2897090, 'name': Rename_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.469985] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Creating linked-clone VM from snapshot {{(pid=69992) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1025.470572] env[69992]: DEBUG oslo_concurrency.lockutils [None req-688438ad-e731-48f8-88f8-59eed5eb5260 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Lock "a9274dfc-afbd-419b-a98b-053d71a05d7c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.171s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.471940] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1025.472294] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-00555ee5-3dba-4019-b453-3d5be04f65a0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.475880] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2152ed08-875c-45eb-bd7b-35bb58749049 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.482264] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1025.482264] env[69992]: value = "task-2897091" [ 1025.482264] env[69992]: _type = "Task" [ 1025.482264] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.483594] env[69992]: DEBUG oslo_vmware.api [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1025.483594] env[69992]: value = "task-2897092" [ 1025.483594] env[69992]: _type = "Task" [ 1025.483594] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.496199] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897091, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.499864] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1025.500270] env[69992]: DEBUG oslo_vmware.api [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897092, 'name': CloneVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.500766] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-41be7c48-06f5-43d4-b8aa-78450a735174 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.506834] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1025.506834] env[69992]: value = "task-2897093" [ 1025.506834] env[69992]: _type = "Task" [ 1025.506834] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.517864] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] VM already powered off {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1025.517864] env[69992]: DEBUG nova.compute.manager [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1025.518222] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e93dc1-2410-45f6-bb7c-1a3bbf578585 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.523831] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "refresh_cache-dd31269e-716c-44cd-9fc3-ce227fe5b3b2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.523998] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquired lock "refresh_cache-dd31269e-716c-44cd-9fc3-ce227fe5b3b2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1025.524215] env[69992]: DEBUG nova.network.neutron [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 
dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1025.585875] env[69992]: DEBUG oslo_vmware.api [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Task: {'id': task-2897089, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.324927} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.586582] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1025.586776] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1025.586956] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1025.587148] env[69992]: INFO nova.compute.manager [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1025.587447] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1025.587636] env[69992]: DEBUG nova.compute.manager [-] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1025.587728] env[69992]: DEBUG nova.network.neutron [-] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1025.592419] env[69992]: DEBUG oslo_vmware.api [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Task: {'id': task-2897090, 'name': Rename_Task, 'duration_secs': 0.185078} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.593019] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1025.593282] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-34070cb4-3c18-47e4-9552-b35626a90de1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.599748] env[69992]: DEBUG oslo_vmware.api [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Waiting for the task: (returnval){ [ 1025.599748] env[69992]: value = "task-2897094" [ 1025.599748] env[69992]: _type = "Task" [ 1025.599748] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.608084] env[69992]: DEBUG oslo_vmware.api [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Task: {'id': task-2897094, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.998885] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897091, 'name': PowerOffVM_Task, 'duration_secs': 0.245797} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.999238] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1025.999238] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1026.000031] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e65dd328-6bad-4e6f-9e9f-3d68bec41840 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.007718] env[69992]: DEBUG oslo_vmware.api [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897092, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.014850] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1026.015303] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-935b17e1-b037-42ff-a90a-8beff30cb582 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.045197] env[69992]: DEBUG nova.compute.manager [req-9db77e73-f20a-4a2b-9d6a-b7bf83a44b65 req-34880302-d175-4eaf-912d-cb9bffc89f91 service nova] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Received event network-vif-deleted-aeab7334-78de-4ade-9c52-d77911f831fb {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1026.045423] env[69992]: INFO nova.compute.manager [req-9db77e73-f20a-4a2b-9d6a-b7bf83a44b65 req-34880302-d175-4eaf-912d-cb9bffc89f91 service nova] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Neutron deleted interface aeab7334-78de-4ade-9c52-d77911f831fb; detaching it from the instance and deleting it from the info cache [ 1026.045657] env[69992]: DEBUG nova.network.neutron [req-9db77e73-f20a-4a2b-9d6a-b7bf83a44b65 req-34880302-d175-4eaf-912d-cb9bffc89f91 service nova] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.086521] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1026.086844] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1026.087136] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Deleting the datastore file [datastore1] 714fafbf-a765-4e2c-8633-997d8244483c {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1026.087467] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b1945c0-03d6-4f27-afb6-59186e11a6dd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.096754] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1026.096754] env[69992]: value = "task-2897096" [ 1026.096754] env[69992]: _type = "Task" [ 1026.096754] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.116160] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897096, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.124808] env[69992]: DEBUG oslo_vmware.api [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Task: {'id': task-2897094, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.299590] env[69992]: DEBUG nova.network.neutron [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Updating instance_info_cache with network_info: [{"id": "d325d681-8643-43a2-93dd-d4687ad115f5", "address": "fa:16:3e:19:cd:26", "network": {"id": "50514bed-dff8-45a5-83f3-ec0c1ece9611", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1240365716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f3a2959667e41f1b5868994454b21be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd325d681-86", "ovs_interfaceid": "d325d681-8643-43a2-93dd-d4687ad115f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.366885] env[69992]: DEBUG nova.network.neutron [-] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.498547] env[69992]: DEBUG oslo_vmware.api [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897092, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.527509] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff2bf57-77a7-408f-836c-857fde9bcce6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.535443] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f9019e4-a97f-4c3a-a2fe-45ac79684a8e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.566595] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bac0d178-ec5c-4b4c-adf3-8599e41aa6a9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.569151] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c7c7472-7e68-4c18-918c-45b5006287a8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.580918] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4458cd6-7f3e-457b-9f38-57d94807cbac {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.587787] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e21946c-18f2-4363-9331-7728ce81b1cd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.610901] env[69992]: DEBUG nova.compute.provider_tree [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1026.635550] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897096, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.239073} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.635982] env[69992]: DEBUG nova.compute.manager [req-9db77e73-f20a-4a2b-9d6a-b7bf83a44b65 req-34880302-d175-4eaf-912d-cb9bffc89f91 service nova] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Detach interface failed, port_id=aeab7334-78de-4ade-9c52-d77911f831fb, reason: Instance 73e41918-88b8-4ff7-9fdd-b45ac97c80ec could not be found. 
{{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1026.637498] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1026.637721] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1026.637918] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1026.643950] env[69992]: DEBUG oslo_vmware.api [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Task: {'id': task-2897094, 'name': PowerOnVM_Task, 'duration_secs': 0.800971} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.644411] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1026.644629] env[69992]: INFO nova.compute.manager [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Took 5.69 seconds to spawn the instance on the hypervisor. [ 1026.644891] env[69992]: DEBUG nova.compute.manager [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1026.645639] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e6a7790-0855-40ec-8712-2c98db143d81 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.802151] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Releasing lock "refresh_cache-dd31269e-716c-44cd-9fc3-ce227fe5b3b2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1026.869130] env[69992]: INFO nova.compute.manager [-] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Took 1.28 seconds to deallocate network for instance. 
[ 1026.999395] env[69992]: DEBUG oslo_vmware.api [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897092, 'name': CloneVM_Task} progress is 95%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.136398] env[69992]: DEBUG nova.scheduler.client.report [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1027.162033] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1027.163027] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-215bccc8-9cb2-4918-9907-60090dae6193 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.168255] env[69992]: INFO nova.compute.manager [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Took 50.32 seconds to build instance. 
[ 1027.174652] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1027.174910] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-15f1860a-2cad-4764-8b93-44a7836e911c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.241432] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1027.241432] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1027.241432] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Deleting the datastore file [datastore2] dd31269e-716c-44cd-9fc3-ce227fe5b3b2 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1027.241432] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe2b138e-d588-416d-9c8a-8d87d51dc1b2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.249023] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1027.249023] env[69992]: value = "task-2897098" [ 1027.249023] env[69992]: _type = "Task" [ 1027.249023] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.262372] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897098, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.377251] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.498926] env[69992]: DEBUG oslo_vmware.api [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897092, 'name': CloneVM_Task, 'duration_secs': 1.690031} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.498926] env[69992]: INFO nova.virt.vmwareapi.vmops [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Created linked-clone VM from snapshot [ 1027.500330] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a77b3e3-8d3f-48fc-aa7d-3e13ada97aa4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.507357] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Uploading image 546c9366-0abc-40f4-a351-6f23687d2823 {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1027.532375] env[69992]: DEBUG oslo_vmware.rw_handles [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1027.532375] env[69992]: value = "vm-581961" [ 1027.532375] env[69992]: _type = "VirtualMachine" [ 1027.532375] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1027.532702] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b07d54e0-c931-4e49-b9df-ebf1113686f1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.539633] env[69992]: DEBUG oslo_vmware.rw_handles [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lease: (returnval){ [ 1027.539633] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]524c4d9e-3284-00a1-3976-51ea86cad598" [ 1027.539633] env[69992]: _type = "HttpNfcLease" [ 1027.539633] env[69992]: } obtained for exporting VM: (result){ [ 1027.539633] env[69992]: value = "vm-581961" [ 1027.539633] env[69992]: _type = "VirtualMachine" [ 1027.539633] env[69992]: }. 
{{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1027.540111] env[69992]: DEBUG oslo_vmware.api [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the lease: (returnval){ [ 1027.540111] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]524c4d9e-3284-00a1-3976-51ea86cad598" [ 1027.540111] env[69992]: _type = "HttpNfcLease" [ 1027.540111] env[69992]: } to be ready. {{(pid=69992) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1027.546166] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1027.546166] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]524c4d9e-3284-00a1-3976-51ea86cad598" [ 1027.546166] env[69992]: _type = "HttpNfcLease" [ 1027.546166] env[69992]: } is initializing. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1027.642260] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.713s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.642843] env[69992]: DEBUG nova.compute.manager [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1027.645981] env[69992]: DEBUG oslo_concurrency.lockutils [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.234s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.647822] env[69992]: INFO nova.compute.claims [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1027.670977] env[69992]: DEBUG oslo_concurrency.lockutils [None req-23731eb9-b579-417b-b20a-a3be7d5c6e28 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Lock "af07ebd0-5f12-49c3-a518-95be9a8d6c82" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.198s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.678190] env[69992]: DEBUG nova.virt.hardware [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1027.678453] env[69992]: DEBUG nova.virt.hardware [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1027.678613] env[69992]: DEBUG nova.virt.hardware [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1027.678879] env[69992]: DEBUG nova.virt.hardware [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1027.678960] env[69992]: DEBUG nova.virt.hardware [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Image pref 0:0:0 
{{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1027.679216] env[69992]: DEBUG nova.virt.hardware [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1027.679487] env[69992]: DEBUG nova.virt.hardware [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1027.679611] env[69992]: DEBUG nova.virt.hardware [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1027.679797] env[69992]: DEBUG nova.virt.hardware [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1027.679969] env[69992]: DEBUG nova.virt.hardware [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1027.680160] env[69992]: DEBUG nova.virt.hardware [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1027.683057] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f97522f-7423-4ba6-9af4-5245db9c629f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.690941] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c24580ac-874d-46d8-8f4a-66461cade92e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.707229] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:4d:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a8930976-0d99-4add-b5de-4f68e2761d75', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1027.715558] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None 
req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1027.716960] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1027.717899] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3ec3adff-6301-4948-bfab-67e045f513e6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.740025] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1027.740025] env[69992]: value = "task-2897100" [ 1027.740025] env[69992]: _type = "Task" [ 1027.740025] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.746642] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897100, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.758078] env[69992]: DEBUG oslo_vmware.api [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897098, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.305288} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.758363] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1027.758549] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1027.758730] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1027.783838] env[69992]: INFO nova.scheduler.client.report [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Deleted allocations for instance dd31269e-716c-44cd-9fc3-ce227fe5b3b2 [ 1028.051906] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1028.051906] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]524c4d9e-3284-00a1-3976-51ea86cad598" [ 1028.051906] env[69992]: _type = "HttpNfcLease" [ 1028.051906] env[69992]: } is ready. 
{{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1028.052513] env[69992]: DEBUG oslo_vmware.rw_handles [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1028.052513] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]524c4d9e-3284-00a1-3976-51ea86cad598" [ 1028.052513] env[69992]: _type = "HttpNfcLease" [ 1028.052513] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1028.053763] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95021b24-52de-4780-b227-66f91bd8c69a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.066449] env[69992]: DEBUG oslo_vmware.rw_handles [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52742487-d05e-40b3-e08b-35c9bf7d219e/disk-0.vmdk from lease info. {{(pid=69992) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1028.066787] env[69992]: DEBUG oslo_vmware.rw_handles [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52742487-d05e-40b3-e08b-35c9bf7d219e/disk-0.vmdk for reading. {{(pid=69992) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1028.155153] env[69992]: DEBUG nova.compute.utils [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1028.156899] env[69992]: DEBUG nova.compute.manager [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1028.157091] env[69992]: DEBUG nova.network.neutron [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1028.177528] env[69992]: DEBUG nova.compute.manager [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1028.199387] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e32f4caf-3f0d-42b3-90d8-9b244142d964 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.217925] env[69992]: DEBUG nova.policy [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0fa721ae334242559daabf98af6c8d9a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '019d22bf8362494ab11c7a54a8035cfa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1028.261525] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897100, 'name': CreateVM_Task, 'duration_secs': 0.423448} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.261806] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1028.262786] env[69992]: DEBUG oslo_concurrency.lockutils [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.263085] env[69992]: DEBUG oslo_concurrency.lockutils [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.263986] env[69992]: DEBUG oslo_concurrency.lockutils [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1028.264350] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d17b842b-b0c2-46e9-b352-fe242fd6f092 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.271742] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1028.271742] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a3967f-681b-d2b8-04ff-270c5e4ecb06" [ 1028.271742] env[69992]: _type = "Task" [ 1028.271742] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.281971] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a3967f-681b-d2b8-04ff-270c5e4ecb06, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.291636] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.314578] env[69992]: DEBUG nova.compute.manager [req-69c941c7-93b5-47a9-ad82-b63cb865369e req-10ff21ca-6b04-4fef-8753-c7533103f2a0 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Received event network-vif-unplugged-d325d681-8643-43a2-93dd-d4687ad115f5 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1028.314578] env[69992]: DEBUG oslo_concurrency.lockutils [req-69c941c7-93b5-47a9-ad82-b63cb865369e req-10ff21ca-6b04-4fef-8753-c7533103f2a0 service nova] Acquiring lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.314578] env[69992]: DEBUG oslo_concurrency.lockutils [req-69c941c7-93b5-47a9-ad82-b63cb865369e req-10ff21ca-6b04-4fef-8753-c7533103f2a0 service nova] Lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.314578] env[69992]: DEBUG oslo_concurrency.lockutils [req-69c941c7-93b5-47a9-ad82-b63cb865369e req-10ff21ca-6b04-4fef-8753-c7533103f2a0 service nova] Lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1028.314816] env[69992]: DEBUG nova.compute.manager [req-69c941c7-93b5-47a9-ad82-b63cb865369e req-10ff21ca-6b04-4fef-8753-c7533103f2a0 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] No waiting events found dispatching network-vif-unplugged-d325d681-8643-43a2-93dd-d4687ad115f5 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1028.314816] env[69992]: WARNING nova.compute.manager [req-69c941c7-93b5-47a9-ad82-b63cb865369e req-10ff21ca-6b04-4fef-8753-c7533103f2a0 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Received unexpected event network-vif-unplugged-d325d681-8643-43a2-93dd-d4687ad115f5 for instance with vm_state shelved_offloaded and task_state None. 
[ 1028.314966] env[69992]: DEBUG nova.compute.manager [req-69c941c7-93b5-47a9-ad82-b63cb865369e req-10ff21ca-6b04-4fef-8753-c7533103f2a0 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Received event network-changed-d325d681-8643-43a2-93dd-d4687ad115f5 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1028.315527] env[69992]: DEBUG nova.compute.manager [req-69c941c7-93b5-47a9-ad82-b63cb865369e req-10ff21ca-6b04-4fef-8753-c7533103f2a0 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Refreshing instance network info cache due to event network-changed-d325d681-8643-43a2-93dd-d4687ad115f5. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1028.315527] env[69992]: DEBUG oslo_concurrency.lockutils [req-69c941c7-93b5-47a9-ad82-b63cb865369e req-10ff21ca-6b04-4fef-8753-c7533103f2a0 service nova] Acquiring lock "refresh_cache-dd31269e-716c-44cd-9fc3-ce227fe5b3b2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.315527] env[69992]: DEBUG oslo_concurrency.lockutils [req-69c941c7-93b5-47a9-ad82-b63cb865369e req-10ff21ca-6b04-4fef-8753-c7533103f2a0 service nova] Acquired lock "refresh_cache-dd31269e-716c-44cd-9fc3-ce227fe5b3b2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.315766] env[69992]: DEBUG nova.network.neutron [req-69c941c7-93b5-47a9-ad82-b63cb865369e req-10ff21ca-6b04-4fef-8753-c7533103f2a0 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Refreshing network info cache for port d325d681-8643-43a2-93dd-d4687ad115f5 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1028.557168] env[69992]: DEBUG nova.network.neutron [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Successfully created port: d93d2fb3-db84-4122-8820-bc39368ea460 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1028.618863] env[69992]: DEBUG nova.compute.manager [req-df24fe9c-6b33-4f7c-bde7-5a4fab8a8440 req-e3b1452f-61cf-4b51-91e2-57e829e2dbd3 service nova] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Received event network-changed-bd75002a-c4e8-4f29-99ff-b6f5055c068d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1028.618928] env[69992]: DEBUG nova.compute.manager [req-df24fe9c-6b33-4f7c-bde7-5a4fab8a8440 req-e3b1452f-61cf-4b51-91e2-57e829e2dbd3 service nova] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Refreshing instance network info cache due to event network-changed-bd75002a-c4e8-4f29-99ff-b6f5055c068d. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1028.619285] env[69992]: DEBUG oslo_concurrency.lockutils [req-df24fe9c-6b33-4f7c-bde7-5a4fab8a8440 req-e3b1452f-61cf-4b51-91e2-57e829e2dbd3 service nova] Acquiring lock "refresh_cache-af07ebd0-5f12-49c3-a518-95be9a8d6c82" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.619285] env[69992]: DEBUG oslo_concurrency.lockutils [req-df24fe9c-6b33-4f7c-bde7-5a4fab8a8440 req-e3b1452f-61cf-4b51-91e2-57e829e2dbd3 service nova] Acquired lock "refresh_cache-af07ebd0-5f12-49c3-a518-95be9a8d6c82" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.621689] env[69992]: DEBUG nova.network.neutron [req-df24fe9c-6b33-4f7c-bde7-5a4fab8a8440 req-e3b1452f-61cf-4b51-91e2-57e829e2dbd3 service nova] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Refreshing network info cache for port bd75002a-c4e8-4f29-99ff-b6f5055c068d {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1028.664388] env[69992]: DEBUG nova.compute.manager [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1028.716209] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.791769] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a3967f-681b-d2b8-04ff-270c5e4ecb06, 'name': SearchDatastore_Task, 'duration_secs': 0.02911} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.792339] env[69992]: DEBUG oslo_concurrency.lockutils [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.792769] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1028.793290] env[69992]: DEBUG oslo_concurrency.lockutils [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.793587] env[69992]: DEBUG oslo_concurrency.lockutils [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.793945] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1028.801340] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e7889f7-8fca-4703-9eb0-e1a9532ea3d7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.812781] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1028.813151] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1028.815969] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-264992d5-936d-449c-aa8b-8d843e314a99 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.820697] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1028.820697] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]521ada22-873f-1d75-702d-fa3ce9e107b7" [ 1028.820697] env[69992]: _type = "Task" [ 1028.820697] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.837150] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521ada22-873f-1d75-702d-fa3ce9e107b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.138134] env[69992]: DEBUG nova.network.neutron [req-69c941c7-93b5-47a9-ad82-b63cb865369e req-10ff21ca-6b04-4fef-8753-c7533103f2a0 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Updated VIF entry in instance network info cache for port d325d681-8643-43a2-93dd-d4687ad115f5. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1029.138484] env[69992]: DEBUG nova.network.neutron [req-69c941c7-93b5-47a9-ad82-b63cb865369e req-10ff21ca-6b04-4fef-8753-c7533103f2a0 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Updating instance_info_cache with network_info: [{"id": "d325d681-8643-43a2-93dd-d4687ad115f5", "address": "fa:16:3e:19:cd:26", "network": {"id": "50514bed-dff8-45a5-83f3-ec0c1ece9611", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1240365716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f3a2959667e41f1b5868994454b21be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapd325d681-86", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.343106] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521ada22-873f-1d75-702d-fa3ce9e107b7, 'name': SearchDatastore_Task, 'duration_secs': 0.019809} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.344326] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-908983bf-dd24-436c-b4b5-6642173667e0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.352069] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1029.352069] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52e1a649-b45f-b74c-1eeb-78d05cf69d83" [ 1029.352069] env[69992]: _type = "Task" [ 1029.352069] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.370736] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e1a649-b45f-b74c-1eeb-78d05cf69d83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.392907] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f82e15-a136-48a9-a06c-cd5f52fc9685 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.400637] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8cea909-0b1c-4709-a872-07c05f3f2609 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.435362] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e6b279-26a2-48cd-b5f3-5e34425629bd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.445511] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ea2914-9fbf-4b9d-8dad-b6780e2788c0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.460910] env[69992]: DEBUG nova.compute.provider_tree [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1029.499323] env[69992]: DEBUG nova.network.neutron [req-df24fe9c-6b33-4f7c-bde7-5a4fab8a8440 req-e3b1452f-61cf-4b51-91e2-57e829e2dbd3 service nova] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Updated VIF entry in instance network info cache for port bd75002a-c4e8-4f29-99ff-b6f5055c068d. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1029.500649] env[69992]: DEBUG nova.network.neutron [req-df24fe9c-6b33-4f7c-bde7-5a4fab8a8440 req-e3b1452f-61cf-4b51-91e2-57e829e2dbd3 service nova] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Updating instance_info_cache with network_info: [{"id": "bd75002a-c4e8-4f29-99ff-b6f5055c068d", "address": "fa:16:3e:74:86:2a", "network": {"id": "19dbabdd-51f5-46b4-aacb-e8a35d25a612", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-686158214-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b2281c7442a4ab798e2581de5f8cdce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e2ec358-9bc5-4dd6-8f4e-0d6ec225282a", "external-id": "nsx-vlan-transportzone-843", "segmentation_id": 843, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd75002a-c4", "ovs_interfaceid": "bd75002a-c4e8-4f29-99ff-b6f5055c068d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.645654] env[69992]: DEBUG oslo_concurrency.lockutils [req-69c941c7-93b5-47a9-ad82-b63cb865369e req-10ff21ca-6b04-4fef-8753-c7533103f2a0 service nova] Releasing lock "refresh_cache-dd31269e-716c-44cd-9fc3-ce227fe5b3b2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.681860] env[69992]: DEBUG nova.compute.manager [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1029.868767] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e1a649-b45f-b74c-1eeb-78d05cf69d83, 'name': SearchDatastore_Task, 'duration_secs': 0.017755} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.869164] env[69992]: DEBUG oslo_concurrency.lockutils [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.869513] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 714fafbf-a765-4e2c-8633-997d8244483c/714fafbf-a765-4e2c-8633-997d8244483c.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1029.869802] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bedf7816-0344-4e7c-8cbd-c98433e8e82c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.879652] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1029.879652] env[69992]: value = "task-2897101" [ 1029.879652] env[69992]: _type = "Task" [ 1029.879652] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.890294] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897101, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.964611] env[69992]: DEBUG nova.scheduler.client.report [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1030.004138] env[69992]: DEBUG oslo_concurrency.lockutils [req-df24fe9c-6b33-4f7c-bde7-5a4fab8a8440 req-e3b1452f-61cf-4b51-91e2-57e829e2dbd3 service nova] Releasing lock "refresh_cache-af07ebd0-5f12-49c3-a518-95be9a8d6c82" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1030.206481] env[69992]: DEBUG nova.network.neutron [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Successfully updated port: d93d2fb3-db84-4122-8820-bc39368ea460 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1030.391035] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897101, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.472196] env[69992]: DEBUG oslo_concurrency.lockutils [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.826s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.472788] env[69992]: DEBUG nova.compute.manager [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1030.475706] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.688s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.475920] env[69992]: DEBUG nova.objects.instance [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Lazy-loading 'resources' on Instance uuid eba81db1-973c-4981-baca-cb98e4087510 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1030.549922] env[69992]: DEBUG nova.compute.manager [req-e55c4a84-373a-4c50-8ef4-bdb0680cc404 req-15c1bcc9-3a14-4cae-99c4-fd08a616ea95 service nova] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Received event network-vif-plugged-d93d2fb3-db84-4122-8820-bc39368ea460 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1030.550178] env[69992]: DEBUG oslo_concurrency.lockutils [req-e55c4a84-373a-4c50-8ef4-bdb0680cc404 req-15c1bcc9-3a14-4cae-99c4-fd08a616ea95 service nova] Acquiring lock "b3d62400-e639-4c49-9207-64fd1e684f99-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.550725] env[69992]: DEBUG oslo_concurrency.lockutils [req-e55c4a84-373a-4c50-8ef4-bdb0680cc404 req-15c1bcc9-3a14-4cae-99c4-fd08a616ea95 service nova] Lock "b3d62400-e639-4c49-9207-64fd1e684f99-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.550812] env[69992]: DEBUG oslo_concurrency.lockutils [req-e55c4a84-373a-4c50-8ef4-bdb0680cc404 req-15c1bcc9-3a14-4cae-99c4-fd08a616ea95 service nova] Lock "b3d62400-e639-4c49-9207-64fd1e684f99-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.551018] env[69992]: DEBUG nova.compute.manager [req-e55c4a84-373a-4c50-8ef4-bdb0680cc404 req-15c1bcc9-3a14-4cae-99c4-fd08a616ea95 service nova] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] No waiting events found dispatching network-vif-plugged-d93d2fb3-db84-4122-8820-bc39368ea460 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1030.551130] env[69992]: WARNING nova.compute.manager [req-e55c4a84-373a-4c50-8ef4-bdb0680cc404 req-15c1bcc9-3a14-4cae-99c4-fd08a616ea95 service nova] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Received unexpected event network-vif-plugged-d93d2fb3-db84-4122-8820-bc39368ea460 for instance with vm_state building and task_state spawning. 
[ 1030.551284] env[69992]: DEBUG nova.compute.manager [req-e55c4a84-373a-4c50-8ef4-bdb0680cc404 req-15c1bcc9-3a14-4cae-99c4-fd08a616ea95 service nova] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Received event network-changed-d93d2fb3-db84-4122-8820-bc39368ea460 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1030.551445] env[69992]: DEBUG nova.compute.manager [req-e55c4a84-373a-4c50-8ef4-bdb0680cc404 req-15c1bcc9-3a14-4cae-99c4-fd08a616ea95 service nova] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Refreshing instance network info cache due to event network-changed-d93d2fb3-db84-4122-8820-bc39368ea460. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1030.551639] env[69992]: DEBUG oslo_concurrency.lockutils [req-e55c4a84-373a-4c50-8ef4-bdb0680cc404 req-15c1bcc9-3a14-4cae-99c4-fd08a616ea95 service nova] Acquiring lock "refresh_cache-b3d62400-e639-4c49-9207-64fd1e684f99" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.551778] env[69992]: DEBUG oslo_concurrency.lockutils [req-e55c4a84-373a-4c50-8ef4-bdb0680cc404 req-15c1bcc9-3a14-4cae-99c4-fd08a616ea95 service nova] Acquired lock "refresh_cache-b3d62400-e639-4c49-9207-64fd1e684f99" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1030.551944] env[69992]: DEBUG nova.network.neutron [req-e55c4a84-373a-4c50-8ef4-bdb0680cc404 req-15c1bcc9-3a14-4cae-99c4-fd08a616ea95 service nova] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Refreshing network info cache for port d93d2fb3-db84-4122-8820-bc39368ea460 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1030.709177] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Acquiring lock "refresh_cache-b3d62400-e639-4c49-9207-64fd1e684f99" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.890723] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897101, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.725027} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.892025] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 714fafbf-a765-4e2c-8633-997d8244483c/714fafbf-a765-4e2c-8633-997d8244483c.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1030.892025] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1030.892025] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-995cd9dd-cdeb-44bb-88ea-e68c696da5e8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.898230] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1030.898230] env[69992]: value = "task-2897102" [ 1030.898230] env[69992]: _type = "Task" [ 1030.898230] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.906402] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897102, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.979298] env[69992]: DEBUG nova.compute.utils [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1030.984358] env[69992]: DEBUG nova.compute.manager [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1030.984597] env[69992]: DEBUG nova.network.neutron [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1031.027604] env[69992]: DEBUG nova.policy [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7328dcabedee4188b3aae3d19b1a6a5e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bfe09ae17ec5434ab3b8f4d7ab5d0cf0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1031.113813] env[69992]: DEBUG nova.network.neutron [req-e55c4a84-373a-4c50-8ef4-bdb0680cc404 req-15c1bcc9-3a14-4cae-99c4-fd08a616ea95 service nova] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1031.211770] env[69992]: DEBUG nova.network.neutron [req-e55c4a84-373a-4c50-8ef4-bdb0680cc404 req-15c1bcc9-3a14-4cae-99c4-fd08a616ea95 service nova] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.279805] env[69992]: DEBUG nova.virt.hardware [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1031.282409] env[69992]: DEBUG nova.virt.hardware [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1031.282409] env[69992]: DEBUG nova.virt.hardware [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 1031.282409] env[69992]: DEBUG nova.virt.hardware [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1031.282409] env[69992]: DEBUG nova.virt.hardware [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1031.282409] env[69992]: DEBUG nova.virt.hardware [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1031.282857] env[69992]: DEBUG nova.virt.hardware [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1031.282857] env[69992]: DEBUG nova.virt.hardware [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1031.282857] env[69992]: DEBUG nova.virt.hardware [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1031.282857] env[69992]: DEBUG nova.virt.hardware [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1031.282857] env[69992]: DEBUG nova.virt.hardware [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1031.284514] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d74eb19-a93e-4063-aae2-324af37111fb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.296092] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52853ae9-d355-3c98-2f7c-dbaf323aba72/disk-0.vmdk. 
{{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1031.296966] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf58c31-2cc3-4497-940d-31b68299b901 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.307127] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02541aa2-e6ff-4352-8658-7a30ece5951c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.312481] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52853ae9-d355-3c98-2f7c-dbaf323aba72/disk-0.vmdk is in state: ready. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1031.312692] env[69992]: ERROR oslo_vmware.rw_handles [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52853ae9-d355-3c98-2f7c-dbaf323aba72/disk-0.vmdk due to incomplete transfer. [ 1031.313361] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-ee478e09-47e7-4321-9716-a8cfed3fb6d5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.334328] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52853ae9-d355-3c98-2f7c-dbaf323aba72/disk-0.vmdk. 
{{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1031.334645] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Uploaded image 6534e8d5-5df3-44be-a620-28db421259d9 to the Glance image server {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1031.336592] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Destroying the VM {{(pid=69992) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1031.336850] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2733fb06-d02d-48ca-9194-9e188db7f01f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.345987] env[69992]: DEBUG oslo_vmware.api [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1031.345987] env[69992]: value = "task-2897103" [ 1031.345987] env[69992]: _type = "Task" [ 1031.345987] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.356887] env[69992]: DEBUG oslo_vmware.api [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897103, 'name': Destroy_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.395878] env[69992]: DEBUG nova.network.neutron [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Successfully created port: 9a92d7a7-73b6-4bd0-b812-3af4be317ae5 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1031.407406] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897102, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.134516} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.409944] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1031.411076] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd0973bd-abca-4af2-9776-8f3a94056f9f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.436794] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] 714fafbf-a765-4e2c-8633-997d8244483c/714fafbf-a765-4e2c-8633-997d8244483c.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1031.440519] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d96bc1b7-b402-4a36-a6a1-0b4f0f190b85 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.463606] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1031.463606] env[69992]: value = "task-2897104" [ 1031.463606] env[69992]: _type = "Task" [ 1031.463606] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.472054] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897104, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.486795] env[69992]: DEBUG nova.compute.manager [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1031.657524] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4018cf6-cb10-4f90-b284-f0aeb806be11 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.665729] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f68071dd-2337-4258-aba9-a32908c3c1f9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.700692] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51c692db-6e63-4bce-a98f-e516ac9a60fd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.708095] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e2d7d7-50f9-4470-bf95-2c4ec4cca54c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.723386] env[69992]: DEBUG oslo_concurrency.lockutils [req-e55c4a84-373a-4c50-8ef4-bdb0680cc404 req-15c1bcc9-3a14-4cae-99c4-fd08a616ea95 service nova] Releasing lock "refresh_cache-b3d62400-e639-4c49-9207-64fd1e684f99" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1031.723952] env[69992]: DEBUG nova.compute.provider_tree [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.725355] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Acquired lock "refresh_cache-b3d62400-e639-4c49-9207-64fd1e684f99" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1031.725677] env[69992]: DEBUG nova.network.neutron [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1031.855780] env[69992]: DEBUG oslo_vmware.api [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897103, 'name': Destroy_Task} progress is 33%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.976334] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897104, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.228124] env[69992]: DEBUG nova.scheduler.client.report [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1032.278666] env[69992]: DEBUG nova.network.neutron [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1032.357689] env[69992]: DEBUG oslo_vmware.api [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897103, 'name': Destroy_Task, 'duration_secs': 0.76047} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.357689] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Destroyed the VM [ 1032.357867] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Deleting Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1032.358198] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7c3b89d6-5da8-4ff0-9919-4918485bdde9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.366496] env[69992]: DEBUG oslo_vmware.api [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1032.366496] env[69992]: value = "task-2897105" [ 1032.366496] env[69992]: _type = "Task" [ 1032.366496] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.377383] env[69992]: DEBUG oslo_vmware.api [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897105, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.415122] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.475681] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897104, 'name': ReconfigVM_Task, 'duration_secs': 0.64977} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.476066] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Reconfigured VM instance instance-0000002c to attach disk [datastore2] 714fafbf-a765-4e2c-8633-997d8244483c/714fafbf-a765-4e2c-8633-997d8244483c.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1032.476735] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e6e22c84-ff30-46bb-b1bf-a3a712fc0518 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.485131] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1032.485131] env[69992]: value = "task-2897106" [ 1032.485131] env[69992]: _type = "Task" [ 1032.485131] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.493582] env[69992]: DEBUG nova.network.neutron [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Updating instance_info_cache with network_info: [{"id": "d93d2fb3-db84-4122-8820-bc39368ea460", "address": "fa:16:3e:ae:db:7b", "network": {"id": "a0bed342-b820-439c-be85-37774f323c77", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1083207164-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "019d22bf8362494ab11c7a54a8035cfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62d6a386-ffdb-4232-83f3-cb21c5e59e85", "external-id": "nsx-vlan-transportzone-950", "segmentation_id": 950, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd93d2fb3-db", "ovs_interfaceid": "d93d2fb3-db84-4122-8820-bc39368ea460", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.500020] env[69992]: DEBUG nova.compute.manager [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1032.505717] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897106, 'name': Rename_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.529168] env[69992]: DEBUG nova.virt.hardware [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1032.529442] env[69992]: DEBUG nova.virt.hardware [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1032.529608] env[69992]: DEBUG nova.virt.hardware [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1032.529860] env[69992]: DEBUG nova.virt.hardware [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1032.530085] env[69992]: DEBUG nova.virt.hardware [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1032.530298] env[69992]: DEBUG nova.virt.hardware [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1032.530582] env[69992]: DEBUG nova.virt.hardware [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1032.530767] env[69992]: DEBUG nova.virt.hardware [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
1032.530942] env[69992]: DEBUG nova.virt.hardware [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1032.531127] env[69992]: DEBUG nova.virt.hardware [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1032.531352] env[69992]: DEBUG nova.virt.hardware [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1032.532472] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3037568f-aa4c-4c39-84d0-8b717a76ed90 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.543102] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2f0651-0f4c-442c-b7b6-38db6a5566b6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.735448] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.259s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.738521] env[69992]: DEBUG oslo_concurrency.lockutils [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.093s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.740652] env[69992]: INFO nova.compute.claims [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1032.766236] env[69992]: INFO nova.scheduler.client.report [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Deleted allocations for instance eba81db1-973c-4981-baca-cb98e4087510 [ 1032.813496] env[69992]: DEBUG nova.compute.manager [req-7f9922cf-7178-4755-884b-9b3fbf486f4b req-287b3106-8678-44b8-b6b4-b705b3ad0509 service nova] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Received event network-vif-plugged-9a92d7a7-73b6-4bd0-b812-3af4be317ae5 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1032.813711] env[69992]: DEBUG oslo_concurrency.lockutils [req-7f9922cf-7178-4755-884b-9b3fbf486f4b 
req-287b3106-8678-44b8-b6b4-b705b3ad0509 service nova] Acquiring lock "7fc7c481-75e8-40f2-a971-752ce6dde59b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.814145] env[69992]: DEBUG oslo_concurrency.lockutils [req-7f9922cf-7178-4755-884b-9b3fbf486f4b req-287b3106-8678-44b8-b6b4-b705b3ad0509 service nova] Lock "7fc7c481-75e8-40f2-a971-752ce6dde59b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.814145] env[69992]: DEBUG oslo_concurrency.lockutils [req-7f9922cf-7178-4755-884b-9b3fbf486f4b req-287b3106-8678-44b8-b6b4-b705b3ad0509 service nova] Lock "7fc7c481-75e8-40f2-a971-752ce6dde59b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.814305] env[69992]: DEBUG nova.compute.manager [req-7f9922cf-7178-4755-884b-9b3fbf486f4b req-287b3106-8678-44b8-b6b4-b705b3ad0509 service nova] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] No waiting events found dispatching network-vif-plugged-9a92d7a7-73b6-4bd0-b812-3af4be317ae5 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1032.814657] env[69992]: WARNING nova.compute.manager [req-7f9922cf-7178-4755-884b-9b3fbf486f4b req-287b3106-8678-44b8-b6b4-b705b3ad0509 service nova] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Received unexpected event network-vif-plugged-9a92d7a7-73b6-4bd0-b812-3af4be317ae5 for instance with vm_state building and task_state spawning. [ 1032.879497] env[69992]: DEBUG oslo_vmware.api [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897105, 'name': RemoveSnapshot_Task} progress is 58%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.915287] env[69992]: DEBUG nova.network.neutron [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Successfully updated port: 9a92d7a7-73b6-4bd0-b812-3af4be317ae5 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1032.997909] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Releasing lock "refresh_cache-b3d62400-e639-4c49-9207-64fd1e684f99" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1032.997909] env[69992]: DEBUG nova.compute.manager [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Instance network_info: |[{"id": "d93d2fb3-db84-4122-8820-bc39368ea460", "address": "fa:16:3e:ae:db:7b", "network": {"id": "a0bed342-b820-439c-be85-37774f323c77", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1083207164-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "019d22bf8362494ab11c7a54a8035cfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62d6a386-ffdb-4232-83f3-cb21c5e59e85", "external-id": "nsx-vlan-transportzone-950", "segmentation_id": 950, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd93d2fb3-db", "ovs_interfaceid": "d93d2fb3-db84-4122-8820-bc39368ea460", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1032.998101] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897106, 'name': Rename_Task, 'duration_secs': 0.234856} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.998385] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:db:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62d6a386-ffdb-4232-83f3-cb21c5e59e85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd93d2fb3-db84-4122-8820-bc39368ea460', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1033.007248] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Creating folder: Project (019d22bf8362494ab11c7a54a8035cfa). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1033.007493] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1033.007809] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-84a9c790-1689-49e9-abc2-43c45a254efd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.010088] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c5f95ff8-ba5f-4f86-9121-da396aa6c28a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.020225] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1033.020225] env[69992]: value = "task-2897107" [ 1033.020225] env[69992]: _type = "Task" [ 1033.020225] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.027194] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Created folder: Project (019d22bf8362494ab11c7a54a8035cfa) in parent group-v581821. [ 1033.027464] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Creating folder: Instances. Parent ref: group-v581963. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1033.028289] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-153cce74-d88b-415e-9a8c-8c0af836f4f9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.034382] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897107, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.046137] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Created folder: Instances in parent group-v581963. [ 1033.046416] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1033.046640] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1033.046874] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87c78cd1-78cb-4e44-9e2f-b00e5e0ecdbc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.070509] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1033.070509] env[69992]: value = "task-2897110" [ 1033.070509] env[69992]: _type = "Task" [ 1033.070509] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.080710] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897110, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.275246] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f25b12ef-781d-493f-968d-59368940aba4 tempest-VolumesAssistedSnapshotsTest-1351026479 tempest-VolumesAssistedSnapshotsTest-1351026479-project-member] Lock "eba81db1-973c-4981-baca-cb98e4087510" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.435s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1033.379400] env[69992]: DEBUG oslo_vmware.api [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897105, 'name': RemoveSnapshot_Task, 'duration_secs': 0.883099} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.379573] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Deleted Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1033.379693] env[69992]: INFO nova.compute.manager [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Took 17.17 seconds to snapshot the instance on the hypervisor. [ 1033.418332] env[69992]: DEBUG oslo_concurrency.lockutils [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Acquiring lock "refresh_cache-7fc7c481-75e8-40f2-a971-752ce6dde59b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.418486] env[69992]: DEBUG oslo_concurrency.lockutils [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Acquired lock "refresh_cache-7fc7c481-75e8-40f2-a971-752ce6dde59b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.418635] env[69992]: DEBUG nova.network.neutron [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1033.533347] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897107, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.581975] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897110, 'name': CreateVM_Task, 'duration_secs': 0.440741} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.582326] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1033.583184] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.583559] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.584075] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1033.584612] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ff38b05-6689-4977-9109-d2af11b96fae {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.593110] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Waiting for the task: (returnval){ [ 1033.593110] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52bde0c9-2c34-92c6-af47-4bd0aa0adf98" [ 1033.593110] env[69992]: _type = "Task" [ 1033.593110] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.601890] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52bde0c9-2c34-92c6-af47-4bd0aa0adf98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.968504] env[69992]: DEBUG nova.compute.manager [None req-1146d697-2aad-4c6b-b1d4-1c1aad76e281 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Found 1 images (rotation: 2) {{(pid=69992) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1034.022387] env[69992]: DEBUG nova.network.neutron [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1034.041645] env[69992]: DEBUG oslo_vmware.api [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897107, 'name': PowerOnVM_Task, 'duration_secs': 0.675904} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.041943] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1034.042429] env[69992]: DEBUG nova.compute.manager [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1034.043328] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a9cde3a-3de7-489a-a87c-0637fcae9f82 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.107365] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52bde0c9-2c34-92c6-af47-4bd0aa0adf98, 'name': SearchDatastore_Task, 'duration_secs': 0.023529} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.111143] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.111402] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1034.111665] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.111789] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.111969] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1034.117236] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-285701ab-7466-41e9-a0fd-89ce4ae85c95 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.129137] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1034.129137] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1034.129137] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75f5a82c-70e9-495f-bb65-ec9c1d7d17cf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.137147] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Waiting for the task: (returnval){ [ 1034.137147] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5291c173-1b91-76af-b806-c3f91ec3a4e7" [ 1034.137147] env[69992]: _type = "Task" [ 1034.137147] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.147258] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5291c173-1b91-76af-b806-c3f91ec3a4e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.307305] env[69992]: DEBUG nova.network.neutron [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Updating instance_info_cache with network_info: [{"id": "9a92d7a7-73b6-4bd0-b812-3af4be317ae5", "address": "fa:16:3e:18:c5:72", "network": {"id": "0b5f4ed2-224e-46b4-8409-8afd9b110869", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1025311448-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe09ae17ec5434ab3b8f4d7ab5d0cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a92d7a7-73", "ovs_interfaceid": "9a92d7a7-73b6-4bd0-b812-3af4be317ae5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.429869] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eded172-2b04-4091-9ee4-06ca864020fa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.444047] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74cc1d67-30c7-4f98-b30f-2b67ee4d65d6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.495040] env[69992]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-369d54e6-6fdb-40d2-9f1b-70a0b17996c0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.510827] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f512a8b7-d056-48af-8d5e-d3159cec66f1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.534317] env[69992]: DEBUG nova.compute.provider_tree [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1034.566652] env[69992]: DEBUG oslo_concurrency.lockutils [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.650596] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5291c173-1b91-76af-b806-c3f91ec3a4e7, 'name': SearchDatastore_Task, 'duration_secs': 0.027727} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.650980] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edf4622b-dd86-4bc9-a4d9-2be9970ea3c7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.657926] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Waiting for the task: (returnval){ [ 1034.657926] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52b9c11d-45bc-abe4-c96a-337dfd5fa35e" [ 1034.657926] env[69992]: _type = "Task" [ 1034.657926] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.667905] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b9c11d-45bc-abe4-c96a-337dfd5fa35e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.811375] env[69992]: DEBUG oslo_concurrency.lockutils [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Releasing lock "refresh_cache-7fc7c481-75e8-40f2-a971-752ce6dde59b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.811717] env[69992]: DEBUG nova.compute.manager [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Instance network_info: |[{"id": "9a92d7a7-73b6-4bd0-b812-3af4be317ae5", "address": "fa:16:3e:18:c5:72", "network": {"id": "0b5f4ed2-224e-46b4-8409-8afd9b110869", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1025311448-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe09ae17ec5434ab3b8f4d7ab5d0cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a92d7a7-73", "ovs_interfaceid": "9a92d7a7-73b6-4bd0-b812-3af4be317ae5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1034.812144] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:c5:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db068f71-08cc-42d4-8ab6-17134c1585e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a92d7a7-73b6-4bd0-b812-3af4be317ae5', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1034.824766] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Creating folder: Project (bfe09ae17ec5434ab3b8f4d7ab5d0cf0). Parent ref: group-v581821. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1034.825086] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aea2e1c1-6b27-4d63-a6a6-360edb19e1a9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.839431] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Created folder: Project (bfe09ae17ec5434ab3b8f4d7ab5d0cf0) in parent group-v581821. [ 1034.843175] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Creating folder: Instances. Parent ref: group-v581966. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1034.843175] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f3076c65-6286-46db-af26-1ce5a6701106 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.851627] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Created folder: Instances in parent group-v581966. [ 1034.851892] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1034.852109] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1034.852362] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-40e7505e-6ec9-4c2b-9497-3737097db09a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.878839] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1034.878839] env[69992]: value = "task-2897113" [ 1034.878839] env[69992]: _type = "Task" [ 1034.878839] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.887989] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897113, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.004559] env[69992]: DEBUG nova.compute.manager [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1035.008259] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2f4a67-6d5e-4d3b-88a1-7211132e36af {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.038383] env[69992]: DEBUG nova.scheduler.client.report [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1035.150137] env[69992]: DEBUG nova.compute.manager [req-5c660fc1-42e1-41a3-ac46-7213eef736db req-81753930-91fa-41b8-b730-24ec3ede2f67 service nova] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Received event network-changed-9a92d7a7-73b6-4bd0-b812-3af4be317ae5 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1035.150362] env[69992]: DEBUG nova.compute.manager [req-5c660fc1-42e1-41a3-ac46-7213eef736db req-81753930-91fa-41b8-b730-24ec3ede2f67 service nova] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Refreshing instance network info cache due to event network-changed-9a92d7a7-73b6-4bd0-b812-3af4be317ae5. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1035.150576] env[69992]: DEBUG oslo_concurrency.lockutils [req-5c660fc1-42e1-41a3-ac46-7213eef736db req-81753930-91fa-41b8-b730-24ec3ede2f67 service nova] Acquiring lock "refresh_cache-7fc7c481-75e8-40f2-a971-752ce6dde59b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.150720] env[69992]: DEBUG oslo_concurrency.lockutils [req-5c660fc1-42e1-41a3-ac46-7213eef736db req-81753930-91fa-41b8-b730-24ec3ede2f67 service nova] Acquired lock "refresh_cache-7fc7c481-75e8-40f2-a971-752ce6dde59b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.150897] env[69992]: DEBUG nova.network.neutron [req-5c660fc1-42e1-41a3-ac46-7213eef736db req-81753930-91fa-41b8-b730-24ec3ede2f67 service nova] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Refreshing network info cache for port 9a92d7a7-73b6-4bd0-b812-3af4be317ae5 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1035.168963] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b9c11d-45bc-abe4-c96a-337dfd5fa35e, 'name': SearchDatastore_Task, 'duration_secs': 0.021951} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.169598] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.169877] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] b3d62400-e639-4c49-9207-64fd1e684f99/b3d62400-e639-4c49-9207-64fd1e684f99.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1035.170370] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-802539f3-30dc-4e35-8fa8-902a1ec5dd3f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.179032] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Waiting for the task: (returnval){ [ 1035.179032] env[69992]: value = "task-2897114" [ 1035.179032] env[69992]: _type = "Task" [ 1035.179032] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.188035] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': task-2897114, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.394890] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897113, 'name': CreateVM_Task, 'duration_secs': 0.493713} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.395297] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1035.396146] env[69992]: DEBUG oslo_concurrency.lockutils [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.396407] env[69992]: DEBUG oslo_concurrency.lockutils [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.396883] env[69992]: DEBUG oslo_concurrency.lockutils [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1035.397292] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2719b01a-1e5a-415f-8c6a-30856e6e3a8c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.408056] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Waiting for the task: (returnval){ [ 1035.408056] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]527322a9-873d-e06d-ada2-476d7c6e303e" [ 1035.408056] env[69992]: _type = "Task" [ 1035.408056] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.420987] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]527322a9-873d-e06d-ada2-476d7c6e303e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.518908] env[69992]: INFO nova.compute.manager [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] instance snapshotting [ 1035.519723] env[69992]: DEBUG nova.objects.instance [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lazy-loading 'flavor' on Instance uuid a7f01cd7-f148-48fc-a71a-5461672d6039 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1035.544693] env[69992]: DEBUG oslo_concurrency.lockutils [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.806s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.548029] env[69992]: DEBUG nova.compute.manager [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1035.548704] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.668s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.548987] env[69992]: DEBUG nova.objects.instance [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Lazy-loading 'resources' on Instance uuid e0b5ad16-f631-444c-a189-167e34574316 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1035.689533] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': task-2897114, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502212} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.689867] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] b3d62400-e639-4c49-9207-64fd1e684f99/b3d62400-e639-4c49-9207-64fd1e684f99.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1035.690273] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1035.690704] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-32f92b7a-f577-4e53-9dc3-7b2804ffed52 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.699916] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Waiting for the task: (returnval){ [ 1035.699916] env[69992]: value = "task-2897115" [ 1035.699916] env[69992]: _type = "Task" [ 1035.699916] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.711979] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': task-2897115, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.780505] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "714fafbf-a765-4e2c-8633-997d8244483c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.780505] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "714fafbf-a765-4e2c-8633-997d8244483c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.780505] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "714fafbf-a765-4e2c-8633-997d8244483c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.780505] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "714fafbf-a765-4e2c-8633-997d8244483c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.780806] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "714fafbf-a765-4e2c-8633-997d8244483c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.782139] env[69992]: INFO nova.compute.manager [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Terminating instance [ 1035.924077] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]527322a9-873d-e06d-ada2-476d7c6e303e, 'name': SearchDatastore_Task, 'duration_secs': 0.059486} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.924077] env[69992]: DEBUG oslo_concurrency.lockutils [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.924077] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1035.924077] env[69992]: DEBUG oslo_concurrency.lockutils [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.924273] env[69992]: DEBUG oslo_concurrency.lockutils [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.924273] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1035.924273] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71a2e526-dd36-4aab-af1d-0647a4333c1f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.938184] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1035.939425] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1035.940321] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17a0a0b8-2dd6-43a9-aefd-38156ecbfb01 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.949655] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Waiting for the task: (returnval){ [ 1035.949655] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528fe804-b4b0-ee55-6157-05d4b85c3145" [ 1035.949655] env[69992]: _type = "Task" [ 1035.949655] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.960759] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528fe804-b4b0-ee55-6157-05d4b85c3145, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.028047] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee7e9772-e302-4c8a-8262-c9b010dc3aef {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.051365] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc2c281-b360-4bf3-8e4b-ae56a576f587 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.056027] env[69992]: DEBUG nova.compute.utils [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1036.059863] env[69992]: DEBUG nova.compute.manager [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1036.060067] env[69992]: DEBUG nova.network.neutron [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1036.128897] env[69992]: DEBUG nova.policy [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '22958db7f0e94b1887f1cebfef8d1f3e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0bb19b95496548c084be8a8c87b8cd94', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1036.215583] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': task-2897115, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.164878} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.215897] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1036.216880] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1cfafe6-b89a-47e8-866b-88fd068c40e4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.251531] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] b3d62400-e639-4c49-9207-64fd1e684f99/b3d62400-e639-4c49-9207-64fd1e684f99.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1036.257339] env[69992]: DEBUG nova.network.neutron [req-5c660fc1-42e1-41a3-ac46-7213eef736db req-81753930-91fa-41b8-b730-24ec3ede2f67 service nova] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Updated VIF entry in instance network info cache for port 9a92d7a7-73b6-4bd0-b812-3af4be317ae5. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1036.257763] env[69992]: DEBUG nova.network.neutron [req-5c660fc1-42e1-41a3-ac46-7213eef736db req-81753930-91fa-41b8-b730-24ec3ede2f67 service nova] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Updating instance_info_cache with network_info: [{"id": "9a92d7a7-73b6-4bd0-b812-3af4be317ae5", "address": "fa:16:3e:18:c5:72", "network": {"id": "0b5f4ed2-224e-46b4-8409-8afd9b110869", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1025311448-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe09ae17ec5434ab3b8f4d7ab5d0cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a92d7a7-73", "ovs_interfaceid": "9a92d7a7-73b6-4bd0-b812-3af4be317ae5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.259412] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82afb607-c3c1-4422-8d59-c01f00e860f6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.275768] env[69992]: DEBUG oslo_concurrency.lockutils [req-5c660fc1-42e1-41a3-ac46-7213eef736db req-81753930-91fa-41b8-b730-24ec3ede2f67 service nova] Releasing lock "refresh_cache-7fc7c481-75e8-40f2-a971-752ce6dde59b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.287329] env[69992]: DEBUG nova.compute.manager [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1036.287525] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1036.287967] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Waiting for the task: (returnval){ [ 1036.287967] env[69992]: value = "task-2897116" [ 1036.287967] env[69992]: _type = "Task" [ 1036.287967] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.297310] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6e803a-c9c6-4202-9219-62d30b9aff19 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.307901] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': task-2897116, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.311622] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1036.311622] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-40721945-4b69-4665-905e-5a12c35bd2b1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.319184] env[69992]: DEBUG oslo_vmware.api [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1036.319184] env[69992]: value = "task-2897117" [ 1036.319184] env[69992]: _type = "Task" [ 1036.319184] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.333058] env[69992]: DEBUG oslo_vmware.api [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897117, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.417172] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "9df7b187-e579-41b0-9d24-be2a1ae93079" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1036.417495] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "9df7b187-e579-41b0-9d24-be2a1ae93079" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.463516] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528fe804-b4b0-ee55-6157-05d4b85c3145, 'name': SearchDatastore_Task, 'duration_secs': 0.015575} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.464362] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8714f49-b3e4-42b7-9b12-b02c53916150 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.471341] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Waiting for the task: (returnval){ [ 1036.471341] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]526f1967-7367-6914-c4c4-e520f22caef3" [ 1036.471341] env[69992]: _type = "Task" [ 1036.471341] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.487507] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526f1967-7367-6914-c4c4-e520f22caef3, 'name': SearchDatastore_Task, 'duration_secs': 0.012173} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.487939] env[69992]: DEBUG oslo_concurrency.lockutils [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.488101] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 7fc7c481-75e8-40f2-a971-752ce6dde59b/7fc7c481-75e8-40f2-a971-752ce6dde59b.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1036.489159] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1e58b833-c688-4c50-a5b4-e598eaf5f153 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.500957] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Waiting for the task: (returnval){ [ 1036.500957] env[69992]: value = "task-2897118" [ 1036.500957] env[69992]: _type = "Task" [ 1036.500957] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.514548] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Task: {'id': task-2897118, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.566150] env[69992]: DEBUG nova.compute.manager [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1036.572912] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Creating Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1036.573277] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-48b9a096-445c-4472-b033-458da3c30bee {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.590076] env[69992]: DEBUG oslo_vmware.api [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1036.590076] env[69992]: value = "task-2897119" [ 1036.590076] env[69992]: _type = "Task" [ 1036.590076] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.600935] env[69992]: DEBUG oslo_vmware.api [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897119, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.615786] env[69992]: DEBUG nova.network.neutron [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Successfully created port: e4c81d0e-1575-49d0-98f5-9fd01f35158c {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1036.815833] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': task-2897116, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.835784] env[69992]: DEBUG oslo_vmware.api [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897117, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.849282] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73557679-4784-4971-a83d-8150a08cfc5d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.859124] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-078f8c86-b834-4524-9540-e440c10cce41 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.905415] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7327429-abde-40ae-b2cf-a1aa39d9e449 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.916706] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c1ea68d-32d9-488d-bba8-567e035c3bba {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.933812] env[69992]: DEBUG nova.compute.provider_tree [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1037.014307] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Task: {'id': task-2897118, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.477689} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.014594] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 7fc7c481-75e8-40f2-a971-752ce6dde59b/7fc7c481-75e8-40f2-a971-752ce6dde59b.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1037.014809] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1037.015209] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-72985656-ee62-4029-b2f3-76382a7b53bd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.026015] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Waiting for the task: (returnval){ [ 1037.026015] env[69992]: value = "task-2897120" [ 1037.026015] env[69992]: _type = "Task" [ 1037.026015] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.038528] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Task: {'id': task-2897120, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.103148] env[69992]: DEBUG oslo_vmware.api [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897119, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.309843] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': task-2897116, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.335336] env[69992]: DEBUG oslo_vmware.api [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897117, 'name': PowerOffVM_Task, 'duration_secs': 0.808225} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.335653] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1037.335826] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1037.336101] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0bb0662e-cc34-4af8-812c-58ce29df0f91 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.413017] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1037.413889] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1037.413889] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 
tempest-ServerDiskConfigTestJSON-649865869-project-member] Deleting the datastore file [datastore2] 714fafbf-a765-4e2c-8633-997d8244483c {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1037.414061] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cfe4f04f-8c1c-46ff-aa5f-2e140923ba3e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.421345] env[69992]: DEBUG oslo_vmware.api [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1037.421345] env[69992]: value = "task-2897122" [ 1037.421345] env[69992]: _type = "Task" [ 1037.421345] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.429360] env[69992]: DEBUG oslo_vmware.api [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897122, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.437402] env[69992]: DEBUG nova.scheduler.client.report [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1037.536720] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Task: {'id': task-2897120, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105452} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.536990] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1037.537780] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa6c54b4-b5e3-4541-a452-d967c64d14af {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.561663] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 7fc7c481-75e8-40f2-a971-752ce6dde59b/7fc7c481-75e8-40f2-a971-752ce6dde59b.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1037.561994] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a56e5e54-9b17-4692-9b9e-1aec0572d7d7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.577913] env[69992]: DEBUG nova.compute.manager [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1037.588091] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Waiting for the task: (returnval){ [ 1037.588091] env[69992]: value = "task-2897123" [ 1037.588091] env[69992]: _type = "Task" [ 1037.588091] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.601324] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Task: {'id': task-2897123, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.606817] env[69992]: DEBUG oslo_vmware.api [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897119, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.609119] env[69992]: DEBUG nova.virt.hardware [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1037.609515] env[69992]: DEBUG nova.virt.hardware [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1037.609805] env[69992]: DEBUG nova.virt.hardware [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1037.610046] env[69992]: DEBUG nova.virt.hardware [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1037.610195] env[69992]: DEBUG nova.virt.hardware [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1037.610350] env[69992]: DEBUG nova.virt.hardware [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1037.610567] env[69992]: DEBUG nova.virt.hardware [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1037.610728] env[69992]: DEBUG nova.virt.hardware [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1037.610918] env[69992]: DEBUG 
nova.virt.hardware [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1037.611116] env[69992]: DEBUG nova.virt.hardware [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1037.611292] env[69992]: DEBUG nova.virt.hardware [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1037.612327] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ae4048-5c67-4ebd-9eb2-a3bd355235dc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.621038] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e900ff97-a0ee-4248-9d37-6abdb5230fd6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.810222] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': task-2897116, 'name': ReconfigVM_Task, 'duration_secs': 1.034044} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.810603] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Reconfigured VM instance instance-0000002f to attach disk [datastore1] b3d62400-e639-4c49-9207-64fd1e684f99/b3d62400-e639-4c49-9207-64fd1e684f99.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1037.811606] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1307226a-8952-4384-bd36-822cb2be8fcc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.819698] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Waiting for the task: (returnval){ [ 1037.819698] env[69992]: value = "task-2897124" [ 1037.819698] env[69992]: _type = "Task" [ 1037.819698] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.828975] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': task-2897124, 'name': Rename_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.937467] env[69992]: DEBUG oslo_vmware.api [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897122, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172764} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.937467] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1037.937467] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1037.937467] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1037.937467] env[69992]: INFO nova.compute.manager [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1037.937649] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1037.939854] env[69992]: DEBUG oslo_vmware.rw_handles [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52742487-d05e-40b3-e08b-35c9bf7d219e/disk-0.vmdk. 
{{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1037.940308] env[69992]: DEBUG nova.compute.manager [-] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1037.940539] env[69992]: DEBUG nova.network.neutron [-] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1037.943147] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e7e923a-ef5c-479a-b2e7-f0bbae25a7f7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.946351] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.398s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.948728] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.102s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.953354] env[69992]: INFO nova.compute.claims [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1037.966075] env[69992]: DEBUG oslo_vmware.rw_handles [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52742487-d05e-40b3-e08b-35c9bf7d219e/disk-0.vmdk is in state: ready. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1037.966075] env[69992]: ERROR oslo_vmware.rw_handles [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52742487-d05e-40b3-e08b-35c9bf7d219e/disk-0.vmdk due to incomplete transfer. [ 1037.969229] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6b05bf9c-d48b-4872-ab81-6bfd00161ad0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.980419] env[69992]: DEBUG oslo_vmware.rw_handles [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52742487-d05e-40b3-e08b-35c9bf7d219e/disk-0.vmdk. 
{{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1037.980630] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Uploaded image 546c9366-0abc-40f4-a351-6f23687d2823 to the Glance image server {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1037.982961] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Destroying the VM {{(pid=69992) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1037.983608] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c0a23a31-019f-494f-a3f3-f8f120d2a4d2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.992676] env[69992]: DEBUG oslo_vmware.api [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1037.992676] env[69992]: value = "task-2897125" [ 1037.992676] env[69992]: _type = "Task" [ 1037.992676] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.004906] env[69992]: DEBUG oslo_vmware.api [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897125, 'name': Destroy_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.067329] env[69992]: INFO nova.scheduler.client.report [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Deleted allocations for instance e0b5ad16-f631-444c-a189-167e34574316 [ 1038.115754] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Task: {'id': task-2897123, 'name': ReconfigVM_Task, 'duration_secs': 0.374861} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.115848] env[69992]: DEBUG oslo_vmware.api [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897119, 'name': CreateSnapshot_Task, 'duration_secs': 1.146355} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.116161] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 7fc7c481-75e8-40f2-a971-752ce6dde59b/7fc7c481-75e8-40f2-a971-752ce6dde59b.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1038.116866] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Created Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1038.117925] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-705ef7c6-573a-458c-9066-27088d972768 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.121184] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb53de5-8103-48ab-9021-f7a3a556ba8e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.137999] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Waiting for the task: (returnval){ [ 1038.137999] env[69992]: value = "task-2897126" [ 1038.137999] env[69992]: _type = "Task" [ 1038.137999] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.149417] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Task: {'id': task-2897126, 'name': Rename_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.331870] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': task-2897124, 'name': Rename_Task, 'duration_secs': 0.194208} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.332419] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1038.332793] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1467c039-f75b-41a9-a4d6-8b83f7b30990 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.341355] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Waiting for the task: (returnval){ [ 1038.341355] env[69992]: value = "task-2897127" [ 1038.341355] env[69992]: _type = "Task" [ 1038.341355] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.349928] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': task-2897127, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.504263] env[69992]: DEBUG oslo_vmware.api [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897125, 'name': Destroy_Task, 'duration_secs': 0.369876} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.504613] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Destroyed the VM [ 1038.504912] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Deleting Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1038.505240] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c9e22800-5a84-43ef-9099-2a53f12cad30 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.514304] env[69992]: DEBUG oslo_vmware.api [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1038.514304] env[69992]: value = "task-2897128" [ 1038.514304] env[69992]: _type = "Task" [ 1038.514304] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.525688] env[69992]: DEBUG oslo_vmware.api [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897128, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.579961] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b209c2bb-97ad-4dfa-9dfb-1a3446f56228 tempest-ServersTestJSON-36232975 tempest-ServersTestJSON-36232975-project-member] Lock "e0b5ad16-f631-444c-a189-167e34574316" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.593s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.647982] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Creating linked-clone VM from snapshot {{(pid=69992) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1038.651887] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0f70882a-d9be-48d5-842a-5471ba690157 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.667077] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Task: {'id': task-2897126, 'name': Rename_Task, 'duration_secs': 0.183431} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.669169] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1038.669288] env[69992]: DEBUG oslo_vmware.api [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1038.669288] env[69992]: value = "task-2897129" [ 1038.669288] env[69992]: _type = "Task" [ 1038.669288] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.669478] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d7d5a8bd-713f-42a8-b2d5-f9ccf8b497f9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.685113] env[69992]: DEBUG oslo_vmware.api [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897129, 'name': CloneVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.686052] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Waiting for the task: (returnval){ [ 1038.686052] env[69992]: value = "task-2897130" [ 1038.686052] env[69992]: _type = "Task" [ 1038.686052] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.695728] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Task: {'id': task-2897130, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.794866] env[69992]: DEBUG nova.network.neutron [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Successfully updated port: e4c81d0e-1575-49d0-98f5-9fd01f35158c {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1038.810750] env[69992]: DEBUG nova.compute.manager [req-2c1681ec-c7dd-4b15-b223-bc9cf105aa1c req-2814e091-f7c0-4ed5-93a1-6b79206bfd23 service nova] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Received event network-vif-deleted-a8930976-0d99-4add-b5de-4f68e2761d75 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1038.810827] env[69992]: INFO nova.compute.manager [req-2c1681ec-c7dd-4b15-b223-bc9cf105aa1c req-2814e091-f7c0-4ed5-93a1-6b79206bfd23 service nova] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Neutron deleted interface a8930976-0d99-4add-b5de-4f68e2761d75; detaching it from the instance and deleting it from the info cache [ 1038.811232] env[69992]: DEBUG nova.network.neutron [req-2c1681ec-c7dd-4b15-b223-bc9cf105aa1c req-2814e091-f7c0-4ed5-93a1-6b79206bfd23 service nova] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.853576] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': task-2897127, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.876054] env[69992]: DEBUG nova.compute.manager [req-c09c7c12-3a69-4967-86a6-3fce9b957aa9 req-a7c0644e-a432-4bdb-a102-68a3c434cd57 service nova] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Received event network-vif-plugged-e4c81d0e-1575-49d0-98f5-9fd01f35158c {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1038.876826] env[69992]: DEBUG oslo_concurrency.lockutils [req-c09c7c12-3a69-4967-86a6-3fce9b957aa9 req-a7c0644e-a432-4bdb-a102-68a3c434cd57 service nova] Acquiring lock "a06d4b38-0e39-46ef-a588-7627661cb201-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.876826] env[69992]: DEBUG oslo_concurrency.lockutils [req-c09c7c12-3a69-4967-86a6-3fce9b957aa9 req-a7c0644e-a432-4bdb-a102-68a3c434cd57 service nova] Lock "a06d4b38-0e39-46ef-a588-7627661cb201-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.876826] env[69992]: DEBUG oslo_concurrency.lockutils [req-c09c7c12-3a69-4967-86a6-3fce9b957aa9 req-a7c0644e-a432-4bdb-a102-68a3c434cd57 service nova] Lock "a06d4b38-0e39-46ef-a588-7627661cb201-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.877018] env[69992]: DEBUG nova.compute.manager [req-c09c7c12-3a69-4967-86a6-3fce9b957aa9 req-a7c0644e-a432-4bdb-a102-68a3c434cd57 service nova] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] No waiting events found dispatching network-vif-plugged-e4c81d0e-1575-49d0-98f5-9fd01f35158c {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1038.877436] env[69992]: WARNING nova.compute.manager [req-c09c7c12-3a69-4967-86a6-3fce9b957aa9 req-a7c0644e-a432-4bdb-a102-68a3c434cd57 service nova] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Received unexpected event network-vif-plugged-e4c81d0e-1575-49d0-98f5-9fd01f35158c for instance with vm_state building and task_state spawning. [ 1038.949447] env[69992]: DEBUG nova.network.neutron [-] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.034814] env[69992]: DEBUG oslo_vmware.api [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897128, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.183307] env[69992]: DEBUG oslo_vmware.api [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897129, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.197385] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Task: {'id': task-2897130, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.297773] env[69992]: DEBUG oslo_concurrency.lockutils [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquiring lock "refresh_cache-a06d4b38-0e39-46ef-a588-7627661cb201" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.297921] env[69992]: DEBUG oslo_concurrency.lockutils [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquired lock "refresh_cache-a06d4b38-0e39-46ef-a588-7627661cb201" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.298110] env[69992]: DEBUG nova.network.neutron [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1039.320484] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8157da2a-c3e9-407b-9591-bb12db2c259a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.333753] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25646710-5240-434c-bc62-2a216e6df7be {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.359529] env[69992]: DEBUG oslo_vmware.api [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': task-2897127, 'name': PowerOnVM_Task, 'duration_secs': 0.769063} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.359529] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1039.359529] env[69992]: INFO nova.compute.manager [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Took 9.68 seconds to spawn the instance on the hypervisor. 
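[editor's note] The PowerOnVM_Task / CloneVM_Task entries above follow the usual oslo.vmware pattern that produces these messages: the driver invokes a vSphere method through the API session, receives a Task managed object reference, and then blocks in wait_for_task(), whose poll loop emits the "progress is N%" and "completed successfully" lines. The sketch below shows that pattern outside Nova; the vCenter host, credentials and the power_on_vm helper are illustrative placeholders and are not taken from this log.

```python
# Sketch of the oslo.vmware invoke/poll pattern seen in the entries above
# (SearchIndex.FindAllByUuid, VirtualMachine.PowerOnVM_Task, wait_for_task).
# Host, credentials and the helper name are placeholders, not from this log.
from oslo_vmware import api as vmware_api


def power_on_vm(host, user, password, instance_uuid):
    # Session setup corresponds to the VMwareAPISession._create_session
    # entries; task_poll_interval controls how often progress is polled
    # (and therefore how often "progress is N%" gets logged).
    session = vmware_api.VMwareAPISession(
        host=host,
        server_username=user,
        server_password=password,
        api_retry_count=3,
        task_poll_interval=0.5)
    try:
        # Locate the VM by its Nova instance UUID, as the driver does with
        # SearchIndex.FindAllByUuid in the trace above.
        vm_refs = session.invoke_api(
            session.vim, 'FindAllByUuid',
            session.vim.service_content.searchIndex,
            uuid=instance_uuid, vmSearch=True, instanceUuid=True)
        if not vm_refs:
            raise RuntimeError('no VM found for instance %s' % instance_uuid)
        # PowerOnVM_Task returns immediately with a Task moref;
        # wait_for_task() polls it and raises if the task fails.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_refs[0])
        session.wait_for_task(task)
    finally:
        session.logout()
```

The same invoke-then-wait shape lies behind the CloneVM_Task, Rename_Task and DeleteDatastoreFile_Task entries elsewhere in this trace.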
[ 1039.359529] env[69992]: DEBUG nova.compute.manager [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1039.360136] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfdbc028-a5d8-48ee-bcb9-c94b6e333237 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.383856] env[69992]: DEBUG nova.compute.manager [req-2c1681ec-c7dd-4b15-b223-bc9cf105aa1c req-2814e091-f7c0-4ed5-93a1-6b79206bfd23 service nova] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Detach interface failed, port_id=a8930976-0d99-4add-b5de-4f68e2761d75, reason: Instance 714fafbf-a765-4e2c-8633-997d8244483c could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1039.453151] env[69992]: INFO nova.compute.manager [-] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Took 1.51 seconds to deallocate network for instance. [ 1039.530275] env[69992]: DEBUG oslo_vmware.api [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897128, 'name': RemoveSnapshot_Task, 'duration_secs': 0.682198} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.530555] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Deleted Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1039.531582] env[69992]: INFO nova.compute.manager [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Took 16.50 seconds to snapshot the instance on the hypervisor. [ 1039.650769] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18033d30-4ca2-4e90-8ef2-59ebbdd0a5d2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.661150] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b989c92-7f27-420e-a44c-20ea351bcff2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.698196] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a74066-2697-48cc-8390-335c689fb17a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.708863] env[69992]: DEBUG oslo_vmware.api [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897129, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.712821] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-163ed25f-c71a-4978-801f-d2b05bedd26a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.717090] env[69992]: DEBUG oslo_vmware.api [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Task: {'id': task-2897130, 'name': PowerOnVM_Task, 'duration_secs': 0.624141} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.717353] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1039.717552] env[69992]: INFO nova.compute.manager [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Took 7.22 seconds to spawn the instance on the hypervisor. [ 1039.717728] env[69992]: DEBUG nova.compute.manager [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1039.718823] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58ef17d-b193-4070-9e8c-f907ee62fba1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.730470] env[69992]: DEBUG nova.compute.provider_tree [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1039.865490] env[69992]: DEBUG nova.network.neutron [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1039.910743] env[69992]: INFO nova.compute.manager [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Took 43.82 seconds to build instance. 
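[editor's note] The recurring "Acquiring lock ... by ..." / "acquired ... waited N s" / ""released" ... held N s" DEBUG lines throughout this trace come from oslo.concurrency's lockutils, which wraps the compute manager's and resource tracker's critical sections. A minimal sketch of the two idioms that produce this logging follows; the function bodies and the refresh_cache helper are illustrative stand-ins, not Nova code.

```python
# Sketch of the oslo.concurrency locking idioms behind the
# "Acquiring lock ... / acquired ... waited / released ... held" lines.
# Function bodies and names here are illustrative, not copied from Nova.
import logging

from oslo_concurrency import lockutils

logging.basicConfig(level=logging.DEBUG)


@lockutils.synchronized('compute_resources')
def update_usage():
    """Decorator form: serialises callers on the named lock and logs the
    acquire/wait/hold timings at DEBUG via the 'inner' wrapper."""


def refresh_cache(instance_uuid):
    # Context-manager form: the per-instance "refresh_cache-<uuid>" and
    # "<uuid>-events" locks in the trace are taken this way.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # network info cache refresh would happen here


if __name__ == '__main__':
    update_usage()
    refresh_cache('a06d4b38-0e39-46ef-a588-7627661cb201')
```

Run with DEBUG logging enabled, this should print acquire/release messages of the same shape as the ones above (the timings will of course differ).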
[ 1039.964872] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.990137] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "2b1a0943-d59a-441d-a2e6-8149106803b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.990137] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "2b1a0943-d59a-441d-a2e6-8149106803b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.990137] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "2b1a0943-d59a-441d-a2e6-8149106803b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.990137] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "2b1a0943-d59a-441d-a2e6-8149106803b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.990407] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "2b1a0943-d59a-441d-a2e6-8149106803b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.991123] env[69992]: INFO nova.compute.manager [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Terminating instance [ 1040.040793] env[69992]: DEBUG nova.compute.manager [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Instance disappeared during snapshot {{(pid=69992) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 1040.055983] env[69992]: DEBUG nova.compute.manager [None req-095b482e-b348-4308-8d35-511af3927c38 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Image not found during clean up 546c9366-0abc-40f4-a351-6f23687d2823 {{(pid=69992) _snapshot_instance 
/opt/stack/nova/nova/compute/manager.py:4601}} [ 1040.132503] env[69992]: DEBUG nova.network.neutron [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Updating instance_info_cache with network_info: [{"id": "e4c81d0e-1575-49d0-98f5-9fd01f35158c", "address": "fa:16:3e:41:02:53", "network": {"id": "daf09f8e-2217-4777-9a09-57c09080946c", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-520713710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bb19b95496548c084be8a8c87b8cd94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4c81d0e-15", "ovs_interfaceid": "e4c81d0e-1575-49d0-98f5-9fd01f35158c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.201862] env[69992]: DEBUG oslo_vmware.api [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897129, 'name': CloneVM_Task, 'duration_secs': 1.4675} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.202307] env[69992]: INFO nova.virt.vmwareapi.vmops [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Created linked-clone VM from snapshot [ 1040.203300] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf96e46d-21a6-4914-b768-30412b9e0668 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.215675] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Uploading image b6861dfe-a1bf-4ef6-9649-24057f9f9e98 {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1040.233958] env[69992]: DEBUG nova.scheduler.client.report [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1040.252184] env[69992]: INFO nova.compute.manager [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Took 38.86 seconds to build instance. [ 1040.259577] env[69992]: DEBUG oslo_vmware.rw_handles [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1040.259577] env[69992]: value = "vm-581970" [ 1040.259577] env[69992]: _type = "VirtualMachine" [ 1040.259577] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1040.260209] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-afc59c4e-24b5-4c06-8577-a27ce0a8a76a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.272460] env[69992]: DEBUG oslo_vmware.rw_handles [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lease: (returnval){ [ 1040.272460] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a7d6c3-783f-76e0-75b0-712848a6edde" [ 1040.272460] env[69992]: _type = "HttpNfcLease" [ 1040.272460] env[69992]: } obtained for exporting VM: (result){ [ 1040.272460] env[69992]: value = "vm-581970" [ 1040.272460] env[69992]: _type = "VirtualMachine" [ 1040.272460] env[69992]: }. 
{{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1040.272711] env[69992]: DEBUG oslo_vmware.api [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the lease: (returnval){ [ 1040.272711] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a7d6c3-783f-76e0-75b0-712848a6edde" [ 1040.272711] env[69992]: _type = "HttpNfcLease" [ 1040.272711] env[69992]: } to be ready. {{(pid=69992) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1040.283535] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1040.283535] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a7d6c3-783f-76e0-75b0-712848a6edde" [ 1040.283535] env[69992]: _type = "HttpNfcLease" [ 1040.283535] env[69992]: } is initializing. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1040.411997] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86a808ce-9814-4de8-8c42-d5b38083e5b0 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Lock "b3d62400-e639-4c49-9207-64fd1e684f99" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.826s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.500306] env[69992]: DEBUG nova.compute.manager [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1040.500306] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1040.500306] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b59add-4121-4f6c-afa4-428e41698da7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.510476] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1040.511571] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-62cafe73-a345-4bf3-8aea-8efd06648494 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.589941] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1040.590526] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1040.591543] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Deleting the datastore file [datastore2] 2b1a0943-d59a-441d-a2e6-8149106803b6 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1040.592054] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-95d24a7f-8785-4caa-9f9c-5cae857e7d51 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.599488] env[69992]: DEBUG oslo_vmware.api [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1040.599488] env[69992]: value = "task-2897133" [ 1040.599488] env[69992]: _type = "Task" [ 1040.599488] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.608886] env[69992]: DEBUG oslo_vmware.api [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897133, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.637488] env[69992]: DEBUG oslo_concurrency.lockutils [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Releasing lock "refresh_cache-a06d4b38-0e39-46ef-a588-7627661cb201" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.637841] env[69992]: DEBUG nova.compute.manager [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Instance network_info: |[{"id": "e4c81d0e-1575-49d0-98f5-9fd01f35158c", "address": "fa:16:3e:41:02:53", "network": {"id": "daf09f8e-2217-4777-9a09-57c09080946c", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-520713710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bb19b95496548c084be8a8c87b8cd94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4c81d0e-15", "ovs_interfaceid": "e4c81d0e-1575-49d0-98f5-9fd01f35158c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1040.638281] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:02:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '777870ab-362f-4a17-9c1c-8d9cc26cd4ce', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e4c81d0e-1575-49d0-98f5-9fd01f35158c', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1040.646354] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Creating folder: Project (0bb19b95496548c084be8a8c87b8cd94). Parent ref: group-v581821. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1040.646354] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47fa46f5-e407-420d-b18c-0c0196edff5f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.662104] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Created folder: Project (0bb19b95496548c084be8a8c87b8cd94) in parent group-v581821. [ 1040.662364] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Creating folder: Instances. Parent ref: group-v581971. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1040.662650] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-28961769-bdae-4826-99b0-8e1cbf1a1c43 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.677613] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Created folder: Instances in parent group-v581971. [ 1040.678941] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1040.678941] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1040.678941] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6834a4cf-fdc6-4a2d-a85f-46071493b593 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.702434] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1040.702434] env[69992]: value = "task-2897136" [ 1040.702434] env[69992]: _type = "Task" [ 1040.702434] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.712186] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897136, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.741739] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.792s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.742019] env[69992]: DEBUG nova.compute.manager [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1040.745372] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.750s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.745507] env[69992]: DEBUG nova.objects.instance [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lazy-loading 'resources' on Instance uuid d361769c-bfc2-4c72-83f4-dc9b51f907a3 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1040.756587] env[69992]: DEBUG oslo_concurrency.lockutils [None req-217c77e6-d4fb-48ef-8e2a-50a369fb9143 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Lock "7fc7c481-75e8-40f2-a971-752ce6dde59b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.062s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.786361] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "546fb923-4574-4407-8625-69e6c4d8d35e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1040.786608] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "546fb923-4574-4407-8625-69e6c4d8d35e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.786803] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1040.786803] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a7d6c3-783f-76e0-75b0-712848a6edde" [ 1040.786803] env[69992]: _type = "HttpNfcLease" [ 1040.786803] env[69992]: } is ready. 
{{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1040.787058] env[69992]: DEBUG oslo_vmware.rw_handles [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1040.787058] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a7d6c3-783f-76e0-75b0-712848a6edde" [ 1040.787058] env[69992]: _type = "HttpNfcLease" [ 1040.787058] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1040.787894] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a362d96c-f8e5-49b8-b955-caafcbd4f6ce {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.798964] env[69992]: DEBUG oslo_vmware.rw_handles [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520397a3-17da-7cd0-912c-a4ae2686050e/disk-0.vmdk from lease info. {{(pid=69992) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1040.799181] env[69992]: DEBUG oslo_vmware.rw_handles [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520397a3-17da-7cd0-912c-a4ae2686050e/disk-0.vmdk for reading. {{(pid=69992) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1040.904866] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-043bb866-fef7-410e-aa95-619a2fcdd511 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.916310] env[69992]: DEBUG nova.compute.manager [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1041.098094] env[69992]: DEBUG nova.compute.manager [req-024ce7e0-1fe1-44b7-9efd-e6ee2ba5c1cd req-fbaba352-0fb8-4625-a4de-1840d0efbb06 service nova] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Received event network-changed-e4c81d0e-1575-49d0-98f5-9fd01f35158c {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1041.098548] env[69992]: DEBUG nova.compute.manager [req-024ce7e0-1fe1-44b7-9efd-e6ee2ba5c1cd req-fbaba352-0fb8-4625-a4de-1840d0efbb06 service nova] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Refreshing instance network info cache due to event network-changed-e4c81d0e-1575-49d0-98f5-9fd01f35158c. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1041.098770] env[69992]: DEBUG oslo_concurrency.lockutils [req-024ce7e0-1fe1-44b7-9efd-e6ee2ba5c1cd req-fbaba352-0fb8-4625-a4de-1840d0efbb06 service nova] Acquiring lock "refresh_cache-a06d4b38-0e39-46ef-a588-7627661cb201" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.099053] env[69992]: DEBUG oslo_concurrency.lockutils [req-024ce7e0-1fe1-44b7-9efd-e6ee2ba5c1cd req-fbaba352-0fb8-4625-a4de-1840d0efbb06 service nova] Acquired lock "refresh_cache-a06d4b38-0e39-46ef-a588-7627661cb201" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.099247] env[69992]: DEBUG nova.network.neutron [req-024ce7e0-1fe1-44b7-9efd-e6ee2ba5c1cd req-fbaba352-0fb8-4625-a4de-1840d0efbb06 service nova] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Refreshing network info cache for port e4c81d0e-1575-49d0-98f5-9fd01f35158c {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1041.113100] env[69992]: DEBUG oslo_vmware.api [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897133, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174185} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.114029] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1041.114246] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1041.114574] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1041.115012] env[69992]: INFO nova.compute.manager [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1041.115328] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1041.115761] env[69992]: DEBUG nova.compute.manager [-] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1041.116117] env[69992]: DEBUG nova.network.neutron [-] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1041.214970] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897136, 'name': CreateVM_Task, 'duration_secs': 0.393349} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.216399] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1041.217109] env[69992]: DEBUG oslo_concurrency.lockutils [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.218712] env[69992]: DEBUG oslo_concurrency.lockutils [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.219255] env[69992]: DEBUG oslo_concurrency.lockutils [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1041.220126] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad6e8fd7-47e3-4a03-8124-25b4277dfc85 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.227878] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for the task: (returnval){ [ 1041.227878] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52afba3c-d69d-162b-8012-22fbfb38db11" [ 1041.227878] env[69992]: _type = "Task" [ 1041.227878] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.238846] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52afba3c-d69d-162b-8012-22fbfb38db11, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.249502] env[69992]: DEBUG nova.compute.utils [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1041.255746] env[69992]: DEBUG nova.compute.manager [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1041.255746] env[69992]: DEBUG nova.network.neutron [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1041.258659] env[69992]: DEBUG nova.compute.manager [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1041.350514] env[69992]: DEBUG nova.policy [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1b2730128c5e487ea5d9b5b0ae9313ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '93d4e973e49e4cf98096fa30ded68db1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1041.450240] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1041.539107] env[69992]: DEBUG nova.compute.manager [req-3de3a178-42af-47be-a5e7-0489650f975f req-1b52898d-31e5-4aa9-9ddd-304236d3bbd3 service nova] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Received event network-changed-9a92d7a7-73b6-4bd0-b812-3af4be317ae5 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1041.544923] env[69992]: DEBUG nova.compute.manager [req-3de3a178-42af-47be-a5e7-0489650f975f req-1b52898d-31e5-4aa9-9ddd-304236d3bbd3 service nova] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Refreshing instance network info cache due to event network-changed-9a92d7a7-73b6-4bd0-b812-3af4be317ae5. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1041.544923] env[69992]: DEBUG oslo_concurrency.lockutils [req-3de3a178-42af-47be-a5e7-0489650f975f req-1b52898d-31e5-4aa9-9ddd-304236d3bbd3 service nova] Acquiring lock "refresh_cache-7fc7c481-75e8-40f2-a971-752ce6dde59b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.544923] env[69992]: DEBUG oslo_concurrency.lockutils [req-3de3a178-42af-47be-a5e7-0489650f975f req-1b52898d-31e5-4aa9-9ddd-304236d3bbd3 service nova] Acquired lock "refresh_cache-7fc7c481-75e8-40f2-a971-752ce6dde59b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.544923] env[69992]: DEBUG nova.network.neutron [req-3de3a178-42af-47be-a5e7-0489650f975f req-1b52898d-31e5-4aa9-9ddd-304236d3bbd3 service nova] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Refreshing network info cache for port 9a92d7a7-73b6-4bd0-b812-3af4be317ae5 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1041.640538] env[69992]: DEBUG oslo_concurrency.lockutils [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Acquiring lock "b3d62400-e639-4c49-9207-64fd1e684f99" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1041.643362] env[69992]: DEBUG oslo_concurrency.lockutils [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Lock "b3d62400-e639-4c49-9207-64fd1e684f99" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1041.643362] env[69992]: DEBUG oslo_concurrency.lockutils [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Acquiring lock "b3d62400-e639-4c49-9207-64fd1e684f99-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1041.643362] env[69992]: DEBUG oslo_concurrency.lockutils [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Lock "b3d62400-e639-4c49-9207-64fd1e684f99-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1041.643362] env[69992]: DEBUG oslo_concurrency.lockutils [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Lock "b3d62400-e639-4c49-9207-64fd1e684f99-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.652181] env[69992]: INFO nova.compute.manager [None req-37b7c55d-239a-4439-be1b-a01522395c57 
tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Terminating instance [ 1041.696781] env[69992]: DEBUG nova.network.neutron [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Successfully created port: 47e06987-ed7c-4f19-8716-20716e1056c3 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1041.745388] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52afba3c-d69d-162b-8012-22fbfb38db11, 'name': SearchDatastore_Task, 'duration_secs': 0.010942} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.748210] env[69992]: DEBUG oslo_concurrency.lockutils [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.748509] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1041.748677] env[69992]: DEBUG oslo_concurrency.lockutils [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.748957] env[69992]: DEBUG oslo_concurrency.lockutils [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.749221] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1041.752251] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38921c3a-0d22-480f-bc26-0c5a7a50d504 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.756369] env[69992]: DEBUG nova.compute.manager [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 
0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1041.763295] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1041.763501] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1041.764328] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d675af5-f8ef-46b1-a57b-058c3ce7bd63 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.781391] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for the task: (returnval){ [ 1041.781391] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a19a06-5608-1ee7-4ea5-4c0b8df9ddbd" [ 1041.781391] env[69992]: _type = "Task" [ 1041.781391] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.791557] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a19a06-5608-1ee7-4ea5-4c0b8df9ddbd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.796374] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1041.948520] env[69992]: DEBUG nova.network.neutron [req-024ce7e0-1fe1-44b7-9efd-e6ee2ba5c1cd req-fbaba352-0fb8-4625-a4de-1840d0efbb06 service nova] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Updated VIF entry in instance network info cache for port e4c81d0e-1575-49d0-98f5-9fd01f35158c. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1041.948871] env[69992]: DEBUG nova.network.neutron [req-024ce7e0-1fe1-44b7-9efd-e6ee2ba5c1cd req-fbaba352-0fb8-4625-a4de-1840d0efbb06 service nova] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Updating instance_info_cache with network_info: [{"id": "e4c81d0e-1575-49d0-98f5-9fd01f35158c", "address": "fa:16:3e:41:02:53", "network": {"id": "daf09f8e-2217-4777-9a09-57c09080946c", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-520713710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bb19b95496548c084be8a8c87b8cd94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4c81d0e-15", "ovs_interfaceid": "e4c81d0e-1575-49d0-98f5-9fd01f35158c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.013929] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd72cf5c-706f-4d29-b57b-9411dad7598d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.022988] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f9bdfe-9caf-416d-8604-32aa24426735 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.056492] env[69992]: DEBUG nova.network.neutron [-] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.062092] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-662b1159-bbbf-4969-874c-8898ce736d3d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.071939] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9071904-78f0-4846-9f89-2e7b8c2fac83 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.094385] env[69992]: DEBUG nova.compute.provider_tree [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1042.166189] env[69992]: DEBUG nova.compute.manager [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1042.167058] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1042.168318] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a75873-0e73-4d6f-ac2e-f2cd2aeea5af {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.181895] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1042.182400] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b5fd3d4a-c2da-4e51-b8cd-dea6e2fabe08 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.191976] env[69992]: DEBUG oslo_vmware.api [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Waiting for the task: (returnval){ [ 1042.191976] env[69992]: value = "task-2897137" [ 1042.191976] env[69992]: _type = "Task" [ 1042.191976] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.206680] env[69992]: DEBUG oslo_vmware.api [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': task-2897137, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.298614] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a19a06-5608-1ee7-4ea5-4c0b8df9ddbd, 'name': SearchDatastore_Task, 'duration_secs': 0.011071} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.298614] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d41ea92f-d318-4e2c-9036-d098100d9539 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.305369] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for the task: (returnval){ [ 1042.305369] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5267b65b-b416-79f6-e42d-3b91fc09eab2" [ 1042.305369] env[69992]: _type = "Task" [ 1042.305369] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.316921] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5267b65b-b416-79f6-e42d-3b91fc09eab2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.453143] env[69992]: DEBUG oslo_concurrency.lockutils [req-024ce7e0-1fe1-44b7-9efd-e6ee2ba5c1cd req-fbaba352-0fb8-4625-a4de-1840d0efbb06 service nova] Releasing lock "refresh_cache-a06d4b38-0e39-46ef-a588-7627661cb201" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1042.496404] env[69992]: DEBUG nova.network.neutron [req-3de3a178-42af-47be-a5e7-0489650f975f req-1b52898d-31e5-4aa9-9ddd-304236d3bbd3 service nova] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Updated VIF entry in instance network info cache for port 9a92d7a7-73b6-4bd0-b812-3af4be317ae5. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1042.496671] env[69992]: DEBUG nova.network.neutron [req-3de3a178-42af-47be-a5e7-0489650f975f req-1b52898d-31e5-4aa9-9ddd-304236d3bbd3 service nova] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Updating instance_info_cache with network_info: [{"id": "9a92d7a7-73b6-4bd0-b812-3af4be317ae5", "address": "fa:16:3e:18:c5:72", "network": {"id": "0b5f4ed2-224e-46b4-8409-8afd9b110869", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1025311448-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfe09ae17ec5434ab3b8f4d7ab5d0cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a92d7a7-73", "ovs_interfaceid": "9a92d7a7-73b6-4bd0-b812-3af4be317ae5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.563990] env[69992]: INFO nova.compute.manager [-] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Took 1.45 seconds to deallocate network for instance. [ 1042.618643] env[69992]: ERROR nova.scheduler.client.report [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [req-7b8f049f-ca33-4070-a504-f0c3c3cbbca9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7b8f049f-ca33-4070-a504-f0c3c3cbbca9"}]} [ 1042.638190] env[69992]: DEBUG nova.scheduler.client.report [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Refreshing inventories for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1042.653608] env[69992]: DEBUG nova.scheduler.client.report [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updating ProviderTree inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1042.653897] env[69992]: DEBUG nova.compute.provider_tree [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1042.669054] env[69992]: DEBUG nova.scheduler.client.report [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Refreshing aggregate associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, aggregates: None {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1042.688963] env[69992]: DEBUG nova.scheduler.client.report [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Refreshing trait associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1042.704666] env[69992]: DEBUG oslo_vmware.api [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': task-2897137, 'name': PowerOffVM_Task, 'duration_secs': 0.319963} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.704954] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1042.705161] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1042.705412] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-65496ad3-e5e9-45bc-9a77-d3df41702f87 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.768026] env[69992]: DEBUG nova.compute.manager [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1042.785095] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1042.785095] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1042.785095] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Deleting the datastore file [datastore1] b3d62400-e639-4c49-9207-64fd1e684f99 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1042.787268] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73626d32-f8e7-45c5-afc5-4b457d8b5d3c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.797785] env[69992]: DEBUG nova.virt.hardware [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:47:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='b7d7a3d8-e1c4-4412-993d-af11150bffcc',id=35,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-318102732',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1042.798061] env[69992]: DEBUG nova.virt.hardware [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1042.798203] env[69992]: DEBUG nova.virt.hardware [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1042.798385] env[69992]: DEBUG nova.virt.hardware [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1042.798529] env[69992]: DEBUG nova.virt.hardware [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1042.798715] env[69992]: DEBUG nova.virt.hardware [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1042.798879] env[69992]: DEBUG nova.virt.hardware [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1042.799056] env[69992]: DEBUG nova.virt.hardware [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1042.799244] env[69992]: DEBUG nova.virt.hardware [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1042.799409] env[69992]: DEBUG nova.virt.hardware [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1042.799645] env[69992]: DEBUG nova.virt.hardware [None 
req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1042.800564] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fda811fa-7629-46e3-ab01-dfe1c8b9e4ed {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.804448] env[69992]: DEBUG oslo_vmware.api [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Waiting for the task: (returnval){ [ 1042.804448] env[69992]: value = "task-2897139" [ 1042.804448] env[69992]: _type = "Task" [ 1042.804448] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.817240] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac9662ae-a3fb-4140-9809-fc5cc8826a1a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.824275] env[69992]: DEBUG oslo_vmware.api [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': task-2897139, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.827622] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1042.828030] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1042.846789] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5267b65b-b416-79f6-e42d-3b91fc09eab2, 'name': SearchDatastore_Task, 'duration_secs': 0.01193} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.847485] env[69992]: DEBUG oslo_concurrency.lockutils [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1042.847782] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] a06d4b38-0e39-46ef-a588-7627661cb201/a06d4b38-0e39-46ef-a588-7627661cb201.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1042.848050] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-00acf163-0e5d-440c-b159-a549508791c1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.857728] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for the task: (returnval){ [ 1042.857728] env[69992]: value = "task-2897140" [ 1042.857728] env[69992]: _type = "Task" [ 1042.857728] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.871776] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897140, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.000136] env[69992]: DEBUG oslo_concurrency.lockutils [req-3de3a178-42af-47be-a5e7-0489650f975f req-1b52898d-31e5-4aa9-9ddd-304236d3bbd3 service nova] Releasing lock "refresh_cache-7fc7c481-75e8-40f2-a971-752ce6dde59b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1043.071318] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.126692] env[69992]: DEBUG nova.compute.manager [req-9c4432ff-0140-409f-a86d-dfb4e5468fb3 req-0c33d9f0-75f2-4046-8059-ae2a7e802d16 service nova] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Received event network-vif-deleted-a50c02e4-9a37-4f83-8d66-8afea64e2bc5 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1043.251735] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3541050e-df83-45d2-be72-cb94eacbda04 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.262624] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97385f64-588a-472f-abee-b3f88a51a7c0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.299669] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d04fc34d-d20b-4068-95eb-3ce7d5d4d8b2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.313803] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb93e656-37d1-4fb7-8e7b-44c9868da829 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.321908] env[69992]: DEBUG oslo_vmware.api [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Task: {'id': task-2897139, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.218754} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.322646] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1043.322849] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1043.323066] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1043.323259] env[69992]: INFO nova.compute.manager [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1043.323505] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1043.323697] env[69992]: DEBUG nova.compute.manager [-] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1043.323794] env[69992]: DEBUG nova.network.neutron [-] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1043.335046] env[69992]: DEBUG nova.compute.provider_tree [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1043.348438] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1043.348715] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1043.348715] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1043.349426] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1043.349426] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1043.349426] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1043.349800] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69992) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1043.349800] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1043.376569] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897140, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.554083] env[69992]: DEBUG nova.network.neutron [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Successfully updated port: 47e06987-ed7c-4f19-8716-20716e1056c3 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1043.652393] env[69992]: DEBUG nova.compute.manager [req-e8063c69-28f3-44e4-8c16-e34745c0674f req-a9563298-65f7-45fc-afdb-e4a38a5cffe0 service nova] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Received event network-vif-plugged-47e06987-ed7c-4f19-8716-20716e1056c3 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1043.652393] env[69992]: DEBUG oslo_concurrency.lockutils [req-e8063c69-28f3-44e4-8c16-e34745c0674f req-a9563298-65f7-45fc-afdb-e4a38a5cffe0 service nova] Acquiring lock "0e8163d9-6ff5-4f1e-af33-ccb42fa46750-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.652393] env[69992]: DEBUG oslo_concurrency.lockutils [req-e8063c69-28f3-44e4-8c16-e34745c0674f req-a9563298-65f7-45fc-afdb-e4a38a5cffe0 service nova] Lock "0e8163d9-6ff5-4f1e-af33-ccb42fa46750-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.652719] env[69992]: DEBUG oslo_concurrency.lockutils [req-e8063c69-28f3-44e4-8c16-e34745c0674f req-a9563298-65f7-45fc-afdb-e4a38a5cffe0 service nova] Lock "0e8163d9-6ff5-4f1e-af33-ccb42fa46750-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.653070] env[69992]: DEBUG nova.compute.manager [req-e8063c69-28f3-44e4-8c16-e34745c0674f req-a9563298-65f7-45fc-afdb-e4a38a5cffe0 service nova] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] No waiting events found dispatching network-vif-plugged-47e06987-ed7c-4f19-8716-20716e1056c3 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1043.653420] env[69992]: WARNING nova.compute.manager [req-e8063c69-28f3-44e4-8c16-e34745c0674f req-a9563298-65f7-45fc-afdb-e4a38a5cffe0 service nova] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Received unexpected event network-vif-plugged-47e06987-ed7c-4f19-8716-20716e1056c3 for instance with vm_state building and task_state spawning. 
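
The 409 above ("placement.concurrent_update"), followed immediately by the inventory refresh and a second update attempt, is the usual generation-conflict handling against the Placement API: every inventory write carries the resource provider generation the writer last read, and a conflict means another thread bumped the generation first, so the client re-reads the provider and retries. The sketch below shows that loop against the real inventories endpoint; PLACEMENT_URL, get_token() and the retry budget are illustrative assumptions, not Nova's actual report client.

```python
# Illustrative sketch of the generation-conflict retry seen in the log.
# The endpoint shape follows the Placement API, but PLACEMENT_URL and
# get_token() are placeholders, not Nova's scheduler report client.
import requests

PLACEMENT_URL = "http://placement.example/placement"   # assumed endpoint
RP_UUID = "9dc5dd7f-a3af-48a9-a04e-f6c1d333da28"        # provider from the log


def get_token():
    """Placeholder for Keystone auth; returns a bearer token string."""
    raise NotImplementedError


def set_inventory(inventories, retries=3):
    headers = {"X-Auth-Token": get_token(),
               "OpenStack-API-Version": "placement 1.26"}
    url = f"{PLACEMENT_URL}/resource_providers/{RP_UUID}/inventories"
    for _ in range(retries):
        # Read the provider's current generation first.
        current = requests.get(url, headers=headers).json()
        payload = {
            "resource_provider_generation":
                current["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = requests.put(url, json=payload, headers=headers)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: someone else bumped the
        # generation; loop, refresh and try again, as the log shows.
    raise RuntimeError("gave up after repeated generation conflicts")
```

In the log, the retried write eventually lands: the later entries report the inventory updated at generation 80 and the provider generation moving from 80 to 81.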
[ 1043.853919] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.873152] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897140, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.580994} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.874133] env[69992]: DEBUG nova.scheduler.client.report [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 80 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1043.874395] env[69992]: DEBUG nova.compute.provider_tree [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 80 to 81 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1043.874585] env[69992]: DEBUG nova.compute.provider_tree [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1043.878207] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] a06d4b38-0e39-46ef-a588-7627661cb201/a06d4b38-0e39-46ef-a588-7627661cb201.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1043.878446] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Extending root virtual disk to 1048576 
{{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1043.878918] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e0106b03-955b-49fe-99f8-c4e0bd505dd0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.887774] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for the task: (returnval){ [ 1043.887774] env[69992]: value = "task-2897141" [ 1043.887774] env[69992]: _type = "Task" [ 1043.887774] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.898420] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897141, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.059541] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "refresh_cache-0e8163d9-6ff5-4f1e-af33-ccb42fa46750" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.059541] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquired lock "refresh_cache-0e8163d9-6ff5-4f1e-af33-ccb42fa46750" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1044.059541] env[69992]: DEBUG nova.network.neutron [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1044.229025] env[69992]: DEBUG nova.network.neutron [-] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.380702] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.635s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1044.383113] env[69992]: DEBUG oslo_concurrency.lockutils [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.286s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1044.383348] env[69992]: DEBUG nova.objects.instance [None req-372795b7-9e23-45c1-877f-56ddbda292a9 
tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lazy-loading 'resources' on Instance uuid 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1044.398847] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897141, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078084} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.399156] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1044.399950] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e004165a-13e6-4c96-9dcb-05cd02ae6f14 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.405545] env[69992]: INFO nova.scheduler.client.report [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Deleted allocations for instance d361769c-bfc2-4c72-83f4-dc9b51f907a3 [ 1044.425505] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] a06d4b38-0e39-46ef-a588-7627661cb201/a06d4b38-0e39-46ef-a588-7627661cb201.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1044.428117] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3974b751-3aa3-45ca-b0a9-c62b95af6cec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.452110] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for the task: (returnval){ [ 1044.452110] env[69992]: value = "task-2897142" [ 1044.452110] env[69992]: _type = "Task" [ 1044.452110] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.460905] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897142, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.597808] env[69992]: DEBUG nova.network.neutron [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1044.693614] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "interface-e5d9de80-1ee5-462a-8459-168fd60e1972-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.693885] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "interface-e5d9de80-1ee5-462a-8459-168fd60e1972-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1044.694278] env[69992]: DEBUG nova.objects.instance [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lazy-loading 'flavor' on Instance uuid e5d9de80-1ee5-462a-8459-168fd60e1972 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1044.732708] env[69992]: INFO nova.compute.manager [-] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Took 1.41 seconds to deallocate network for instance. [ 1044.797401] env[69992]: DEBUG nova.network.neutron [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Updating instance_info_cache with network_info: [{"id": "47e06987-ed7c-4f19-8716-20716e1056c3", "address": "fa:16:3e:7b:f9:7a", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47e06987-ed", "ovs_interfaceid": "47e06987-ed7c-4f19-8716-20716e1056c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.947665] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0c352cc9-ebfd-4397-a16e-1ab2979091a4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "d361769c-bfc2-4c72-83f4-dc9b51f907a3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.598s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} 
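
The CopyVirtualDisk_Task, ExtendVirtualDisk_Task and ReconfigVM_Task entries around here all follow the same oslo.vmware pattern: invoke a vCenter method that returns a task reference, then poll it until completion (the recurring "Task: {...} progress is N%" lines are that polling). Below is a rough sketch of the pattern using oslo.vmware directly; the vCenter host and credentials are placeholders and the call signatures are reproduced from memory, so treat it as an illustration rather than Nova's vm_util code. The datastore paths are the ones appearing in this log.

```python
# Sketch of the invoke-then-poll pattern behind the *_Task log lines.
# Host/credentials are placeholders; signatures are approximate.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    "vc.example.test", "user", "password",        # placeholder credentials
    api_retry_count=10, task_poll_interval=0.5)

# Look up a datacenter moref (CopyVirtualDisk needs one for datastore paths).
dc_ref = vim_util.get_objects(session.vim, "Datacenter", 1).objects[0].obj

disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, "CopyVirtualDisk_Task", disk_mgr,
    sourceName="[datastore2] devstack-image-cache_base/"
               "eb50549f-9db8-4c15-a738-0e4b1e9e33fb/"
               "eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk",
    sourceDatacenter=dc_ref,
    destName="[datastore2] a06d4b38-0e39-46ef-a588-7627661cb201/"
             "a06d4b38-0e39-46ef-a588-7627661cb201.vmdk",
    destDatacenter=dc_ref)

# wait_for_task polls the task object, producing the kind of progress
# updates seen in the log, and raises if the task ends in an error state.
task_info = session.wait_for_task(task)
```

Because wait_for_task raises on task failure, the compute code can treat a returned task_info as success and move straight on to the next step (extend, reconfigure, rename, power on), which is the sequence visible in the surrounding entries.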
[ 1044.963317] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897142, 'name': ReconfigVM_Task, 'duration_secs': 0.355302} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.965487] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Reconfigured VM instance instance-00000031 to attach disk [datastore2] a06d4b38-0e39-46ef-a588-7627661cb201/a06d4b38-0e39-46ef-a588-7627661cb201.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1044.966878] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2ae66346-648d-4584-82ba-b076469f532b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.975203] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for the task: (returnval){ [ 1044.975203] env[69992]: value = "task-2897143" [ 1044.975203] env[69992]: _type = "Task" [ 1044.975203] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.985672] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897143, 'name': Rename_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.240685] env[69992]: DEBUG oslo_concurrency.lockutils [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.300798] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Releasing lock "refresh_cache-0e8163d9-6ff5-4f1e-af33-ccb42fa46750" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.301857] env[69992]: DEBUG nova.compute.manager [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Instance network_info: |[{"id": "47e06987-ed7c-4f19-8716-20716e1056c3", "address": "fa:16:3e:7b:f9:7a", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47e06987-ed", "ovs_interfaceid": "47e06987-ed7c-4f19-8716-20716e1056c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1045.301987] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:f9:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '07e9bef1-2b0e-4e4d-997f-de71bb0e213a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47e06987-ed7c-4f19-8716-20716e1056c3', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1045.310081] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1045.313070] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1045.314189] env[69992]: DEBUG nova.objects.instance [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lazy-loading 'pci_requests' on Instance uuid e5d9de80-1ee5-462a-8459-168fd60e1972 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1045.315206] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-778e3234-758c-4fa3-9696-0b246ca3dd35 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.341524] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1045.341524] env[69992]: value = "task-2897144" [ 1045.341524] env[69992]: _type = "Task" [ 1045.341524] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.353304] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897144, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.439624] env[69992]: DEBUG nova.compute.manager [req-ce15ef18-39f8-495f-a378-fd5f87ab515e req-94384659-a24f-4826-9d31-9c397b72ca3a service nova] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Received event network-vif-deleted-d93d2fb3-db84-4122-8820-bc39368ea460 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1045.456633] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9834096e-c0b2-4f44-9a98-d02ab5720b18 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.465946] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-129a4501-51e1-45db-93d3-0944a4dc0869 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.502480] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01627f27-9b7f-4ef4-a9a1-1189336be88d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.514035] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897143, 'name': Rename_Task, 'duration_secs': 0.164974} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.514454] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1045.515715] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b671b6c-6c66-452c-8c4e-b2ae9cb37f20 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.519664] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6358bbe5-0199-43c5-93b3-280f26bff974 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.534102] env[69992]: DEBUG nova.compute.provider_tree [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1045.537175] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for the task: (returnval){ [ 1045.537175] env[69992]: value = "task-2897145" [ 1045.537175] env[69992]: _type = "Task" [ 1045.537175] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.547537] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897145, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.836045] env[69992]: DEBUG nova.objects.base [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1045.836162] env[69992]: DEBUG nova.network.neutron [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1045.854022] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897144, 'name': CreateVM_Task, 'duration_secs': 0.348129} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.854022] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1045.854022] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.854022] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.854022] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1045.854022] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ffcf697-8dab-4572-9b17-cce5445b37c6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.859862] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1045.859862] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5246ae67-4142-f6c7-f65a-6d49986cc841" [ 1045.859862] env[69992]: _type = "Task" [ 1045.859862] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.868867] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5246ae67-4142-f6c7-f65a-6d49986cc841, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.896188] env[69992]: DEBUG nova.policy [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd0f7a6e9a76342a1a4fd39a8b21a31d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dc6fa4e45f4c47c49d67e6efe2eb7a50', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1045.968754] env[69992]: DEBUG nova.compute.manager [req-531ba5b6-d1d2-4680-9fd6-6d4b016713e4 req-02d4e270-a12a-41cc-b6cc-29d2d567d971 service nova] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Received event network-changed-47e06987-ed7c-4f19-8716-20716e1056c3 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1045.968754] env[69992]: DEBUG nova.compute.manager [req-531ba5b6-d1d2-4680-9fd6-6d4b016713e4 req-02d4e270-a12a-41cc-b6cc-29d2d567d971 service nova] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Refreshing instance network info cache due to event network-changed-47e06987-ed7c-4f19-8716-20716e1056c3. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1045.968754] env[69992]: DEBUG oslo_concurrency.lockutils [req-531ba5b6-d1d2-4680-9fd6-6d4b016713e4 req-02d4e270-a12a-41cc-b6cc-29d2d567d971 service nova] Acquiring lock "refresh_cache-0e8163d9-6ff5-4f1e-af33-ccb42fa46750" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.968754] env[69992]: DEBUG oslo_concurrency.lockutils [req-531ba5b6-d1d2-4680-9fd6-6d4b016713e4 req-02d4e270-a12a-41cc-b6cc-29d2d567d971 service nova] Acquired lock "refresh_cache-0e8163d9-6ff5-4f1e-af33-ccb42fa46750" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.968754] env[69992]: DEBUG nova.network.neutron [req-531ba5b6-d1d2-4680-9fd6-6d4b016713e4 req-02d4e270-a12a-41cc-b6cc-29d2d567d971 service nova] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Refreshing network info cache for port 47e06987-ed7c-4f19-8716-20716e1056c3 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1046.039600] env[69992]: DEBUG nova.scheduler.client.report [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1046.056413] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 
tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897145, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.226502] env[69992]: DEBUG nova.network.neutron [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Successfully created port: 0229965f-d491-4e94-9f75-201dda751cd0 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1046.371745] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5246ae67-4142-f6c7-f65a-6d49986cc841, 'name': SearchDatastore_Task, 'duration_secs': 0.011721} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.372071] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1046.372310] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1046.372543] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.372688] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.372866] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1046.373170] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8533534e-aff2-4cbd-b2d2-509f2a012482 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.382705] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 
tempest-MigrationsAdminTest-2064433656-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1046.382885] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1046.383590] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bee9992-9b87-4f23-a597-673271803007 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.389045] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1046.389045] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]523f248e-13e1-fd43-1978-9eab1a9895be" [ 1046.389045] env[69992]: _type = "Task" [ 1046.389045] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.397273] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523f248e-13e1-fd43-1978-9eab1a9895be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.550841] env[69992]: DEBUG oslo_concurrency.lockutils [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.168s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.557292] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.788s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.558957] env[69992]: INFO nova.compute.claims [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1046.562363] env[69992]: DEBUG oslo_vmware.api [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897145, 'name': PowerOnVM_Task, 'duration_secs': 0.554462} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.565053] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1046.565253] env[69992]: INFO nova.compute.manager [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Took 8.99 seconds to spawn the instance on the hypervisor. [ 1046.565438] env[69992]: DEBUG nova.compute.manager [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1046.566319] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde4c5ad-45af-4583-91c8-d7513c28e670 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.578229] env[69992]: INFO nova.scheduler.client.report [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Deleted allocations for instance 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97 [ 1046.686170] env[69992]: DEBUG nova.network.neutron [req-531ba5b6-d1d2-4680-9fd6-6d4b016713e4 req-02d4e270-a12a-41cc-b6cc-29d2d567d971 service nova] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Updated VIF entry in instance network info cache for port 47e06987-ed7c-4f19-8716-20716e1056c3. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1046.686542] env[69992]: DEBUG nova.network.neutron [req-531ba5b6-d1d2-4680-9fd6-6d4b016713e4 req-02d4e270-a12a-41cc-b6cc-29d2d567d971 service nova] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Updating instance_info_cache with network_info: [{"id": "47e06987-ed7c-4f19-8716-20716e1056c3", "address": "fa:16:3e:7b:f9:7a", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47e06987-ed", "ovs_interfaceid": "47e06987-ed7c-4f19-8716-20716e1056c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.900665] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523f248e-13e1-fd43-1978-9eab1a9895be, 'name': SearchDatastore_Task, 'duration_secs': 0.008961} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.901522] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-926a17e6-94af-4f74-b359-63bd01b7eb7a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.908316] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1046.908316] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a36312-0f97-9f8d-c3bb-d86e22687898" [ 1046.908316] env[69992]: _type = "Task" [ 1046.908316] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.917015] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a36312-0f97-9f8d-c3bb-d86e22687898, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.091381] env[69992]: INFO nova.compute.manager [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Took 41.47 seconds to build instance. [ 1047.092515] env[69992]: DEBUG oslo_concurrency.lockutils [None req-372795b7-9e23-45c1-877f-56ddbda292a9 tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "7fbab19d-5a0a-4da3-b078-40ca0eaf8c97" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.973s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.189564] env[69992]: DEBUG oslo_concurrency.lockutils [req-531ba5b6-d1d2-4680-9fd6-6d4b016713e4 req-02d4e270-a12a-41cc-b6cc-29d2d567d971 service nova] Releasing lock "refresh_cache-0e8163d9-6ff5-4f1e-af33-ccb42fa46750" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.419822] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a36312-0f97-9f8d-c3bb-d86e22687898, 'name': SearchDatastore_Task, 'duration_secs': 0.010725} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.420061] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.420388] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 0e8163d9-6ff5-4f1e-af33-ccb42fa46750/0e8163d9-6ff5-4f1e-af33-ccb42fa46750.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1047.420668] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4a38ac6f-1e2c-41d7-9854-018fe8ae81a5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.430973] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1047.430973] env[69992]: value = "task-2897146" [ 1047.430973] env[69992]: _type = "Task" [ 1047.430973] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.441198] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897146, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.595135] env[69992]: DEBUG oslo_concurrency.lockutils [None req-571332e4-4d5d-4b73-8021-4a889e30a5dc tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lock "a06d4b38-0e39-46ef-a588-7627661cb201" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.852s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.897888] env[69992]: DEBUG nova.network.neutron [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Successfully updated port: 0229965f-d491-4e94-9f75-201dda751cd0 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1047.945536] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897146, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.100179] env[69992]: DEBUG nova.compute.manager [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1048.130610] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-902935a8-f842-4d0a-a632-981d176ec7f1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.141135] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31b8c359-9fdf-4ba0-b1e2-54f8297de67d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.146446] env[69992]: DEBUG nova.compute.manager [req-273f7d32-14f3-457e-8740-7d418ed4bb29 req-2ac46889-bf60-4c04-8779-10ab11ab238b service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Received event network-vif-plugged-0229965f-d491-4e94-9f75-201dda751cd0 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1048.146672] env[69992]: DEBUG oslo_concurrency.lockutils [req-273f7d32-14f3-457e-8740-7d418ed4bb29 req-2ac46889-bf60-4c04-8779-10ab11ab238b service nova] Acquiring lock "e5d9de80-1ee5-462a-8459-168fd60e1972-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.146996] env[69992]: DEBUG oslo_concurrency.lockutils [req-273f7d32-14f3-457e-8740-7d418ed4bb29 req-2ac46889-bf60-4c04-8779-10ab11ab238b service nova] Lock "e5d9de80-1ee5-462a-8459-168fd60e1972-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.147226] env[69992]: DEBUG oslo_concurrency.lockutils [req-273f7d32-14f3-457e-8740-7d418ed4bb29 req-2ac46889-bf60-4c04-8779-10ab11ab238b service nova] Lock "e5d9de80-1ee5-462a-8459-168fd60e1972-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.147399] env[69992]: DEBUG nova.compute.manager [req-273f7d32-14f3-457e-8740-7d418ed4bb29 req-2ac46889-bf60-4c04-8779-10ab11ab238b service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] No waiting events found dispatching network-vif-plugged-0229965f-d491-4e94-9f75-201dda751cd0 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1048.147564] env[69992]: WARNING nova.compute.manager [req-273f7d32-14f3-457e-8740-7d418ed4bb29 req-2ac46889-bf60-4c04-8779-10ab11ab238b service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Received unexpected event network-vif-plugged-0229965f-d491-4e94-9f75-201dda751cd0 for instance with vm_state active and task_state None. 
[ 1048.177809] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242b0dc6-c279-4f53-b8c7-08ec5ba3c87e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.187682] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef22ab1-862a-4c8c-876e-fd30a9970939 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.203268] env[69992]: DEBUG nova.compute.provider_tree [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1048.401064] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "refresh_cache-e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.401275] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "refresh_cache-e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1048.401461] env[69992]: DEBUG nova.network.neutron [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1048.445393] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897146, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.5312} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.445693] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 0e8163d9-6ff5-4f1e-af33-ccb42fa46750/0e8163d9-6ff5-4f1e-af33-ccb42fa46750.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1048.445912] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1048.446190] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ae57b5eb-e311-4541-965b-29471d56706e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.454436] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1048.454436] env[69992]: value = "task-2897147" [ 1048.454436] env[69992]: _type = "Task" [ 1048.454436] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.466019] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897147, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.620679] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.707205] env[69992]: DEBUG nova.scheduler.client.report [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1048.938548] env[69992]: WARNING nova.network.neutron [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] 7c8e9b14-bcc2-45f2-8b37-5f478b75057e already exists in list: networks containing: ['7c8e9b14-bcc2-45f2-8b37-5f478b75057e']. ignoring it [ 1048.967016] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897147, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073357} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.967334] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1048.968551] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b82fbbe6-cf3f-4e88-9a46-9e409c661990 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.994746] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] 0e8163d9-6ff5-4f1e-af33-ccb42fa46750/0e8163d9-6ff5-4f1e-af33-ccb42fa46750.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1048.997916] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-13467040-e02e-4e05-87d8-10446019c0e2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.023031] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1049.023031] env[69992]: value = "task-2897148" [ 1049.023031] env[69992]: _type = "Task" [ 1049.023031] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.031842] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897148, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.215519] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.658s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.215849] env[69992]: DEBUG nova.compute.manager [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1049.220084] env[69992]: DEBUG oslo_concurrency.lockutils [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.985s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.222380] env[69992]: INFO nova.compute.claims [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1049.373982] env[69992]: DEBUG nova.network.neutron [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Updating instance_info_cache with network_info: [{"id": "e64de32e-0e37-4777-91e7-8be0da0fa147", "address": "fa:16:3e:c2:9f:3c", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape64de32e-0e", "ovs_interfaceid": "e64de32e-0e37-4777-91e7-8be0da0fa147", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0229965f-d491-4e94-9f75-201dda751cd0", "address": "fa:16:3e:a0:6c:eb", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0229965f-d4", "ovs_interfaceid": "0229965f-d491-4e94-9f75-201dda751cd0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1049.413462] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "98cd0eb8-d17a-4a9b-a172-1ba1207168d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.413730] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "98cd0eb8-d17a-4a9b-a172-1ba1207168d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.413945] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "98cd0eb8-d17a-4a9b-a172-1ba1207168d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.414178] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "98cd0eb8-d17a-4a9b-a172-1ba1207168d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.414319] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "98cd0eb8-d17a-4a9b-a172-1ba1207168d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.416613] env[69992]: INFO nova.compute.manager [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Terminating instance [ 1049.534567] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897148, 'name': ReconfigVM_Task, 'duration_secs': 0.472743} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.534894] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Reconfigured VM instance instance-00000032 to attach disk [datastore2] 0e8163d9-6ff5-4f1e-af33-ccb42fa46750/0e8163d9-6ff5-4f1e-af33-ccb42fa46750.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1049.535613] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-08ad46d2-4385-4099-ae08-70efd05e55ff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.544738] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1049.544738] env[69992]: value = "task-2897149" [ 1049.544738] env[69992]: _type = "Task" [ 1049.544738] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.554122] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897149, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.728954] env[69992]: DEBUG nova.compute.utils [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1049.733193] env[69992]: DEBUG nova.compute.manager [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1049.733427] env[69992]: DEBUG nova.network.neutron [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1049.801257] env[69992]: DEBUG nova.policy [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb11c1eaf21c48ce874a62e8204e1680', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd6555d1831c04485b62e06f8579f389b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1049.876554] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "refresh_cache-e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1049.877293] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.877501] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1049.878481] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2740f9-8f24-4664-8038-95a28a13c3aa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.898712] env[69992]: DEBUG nova.virt.hardware [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1049.898964] env[69992]: DEBUG nova.virt.hardware [None 
req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1049.899145] env[69992]: DEBUG nova.virt.hardware [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1049.899333] env[69992]: DEBUG nova.virt.hardware [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1049.899521] env[69992]: DEBUG nova.virt.hardware [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1049.899698] env[69992]: DEBUG nova.virt.hardware [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1049.899909] env[69992]: DEBUG nova.virt.hardware [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1049.900083] env[69992]: DEBUG nova.virt.hardware [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1049.900258] env[69992]: DEBUG nova.virt.hardware [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1049.900468] env[69992]: DEBUG nova.virt.hardware [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1049.900695] env[69992]: DEBUG nova.virt.hardware [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1049.907521] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] 
[instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Reconfiguring VM to attach interface {{(pid=69992) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1049.907848] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9208a0a-fa70-477a-8c1e-760fbb706f89 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.923526] env[69992]: DEBUG nova.compute.manager [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1049.923526] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1049.923526] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4e3d51-0929-4a24-812d-6487e1dc6f59 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.926950] env[69992]: DEBUG oslo_vmware.api [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1049.926950] env[69992]: value = "task-2897150" [ 1049.926950] env[69992]: _type = "Task" [ 1049.926950] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.932892] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1049.935858] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-61ed5fe8-38f6-4901-891e-888b2f7c6b05 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.937890] env[69992]: DEBUG oslo_vmware.api [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897150, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.955366] env[69992]: DEBUG oslo_vmware.api [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 1049.955366] env[69992]: value = "task-2897151" [ 1049.955366] env[69992]: _type = "Task" [ 1049.955366] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.964968] env[69992]: DEBUG oslo_vmware.api [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2897151, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.062472] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897149, 'name': Rename_Task, 'duration_secs': 0.210357} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.062792] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1050.063106] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-05e1971f-d693-4d9c-acc3-65764a091ad5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.074078] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1050.074078] env[69992]: value = "task-2897152" [ 1050.074078] env[69992]: _type = "Task" [ 1050.074078] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.084874] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897152, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.233891] env[69992]: DEBUG nova.compute.manager [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1050.307896] env[69992]: DEBUG nova.network.neutron [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Successfully created port: 3b8fa629-0413-47b5-9a6d-7b64d336638d {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1050.439207] env[69992]: DEBUG oslo_vmware.api [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897150, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.466861] env[69992]: DEBUG oslo_vmware.api [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2897151, 'name': PowerOffVM_Task, 'duration_secs': 0.29164} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.469701] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1050.469974] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1050.470440] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fed7eca2-3f14-40be-ba37-15be4ecb3a53 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.585647] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897152, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.623784] env[69992]: DEBUG oslo_vmware.rw_handles [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520397a3-17da-7cd0-912c-a4ae2686050e/disk-0.vmdk. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1050.624820] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074dcfc2-2483-4b2c-a2ad-e6bcf19df15a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.637721] env[69992]: DEBUG oslo_vmware.rw_handles [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520397a3-17da-7cd0-912c-a4ae2686050e/disk-0.vmdk is in state: ready. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1050.637721] env[69992]: ERROR oslo_vmware.rw_handles [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520397a3-17da-7cd0-912c-a4ae2686050e/disk-0.vmdk due to incomplete transfer. 
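The lease sequence just logged follows a fixed rule: once the VMDK read handle is done with the NFC export, the HttpNfcLease state is checked, and because the transfer did not complete the lease is aborted (the "Aborting lease ... due to incomplete transfer" entry, with the HttpNfcLeaseAbort call right after it). A minimal sketch of that decision, with get_lease_state and invoke as hypothetical stand-ins for the property read and SOAP call that oslo.vmware actually performs; this is not oslo.vmware's code, only the rule the log illustrates:

    # Illustrative sketch only; helper names are assumptions, not oslo.vmware API.
    def release_lease(get_lease_state, invoke, lease, transfer_complete):
        state = get_lease_state(lease)          # the log reads this via RetrievePropertiesEx
        if state != "ready":
            return                              # lease already errored out or expired
        if transfer_complete:
            invoke(lease, "HttpNfcLeaseComplete")   # full stream written, finalize the export
        else:
            # Matches the entry above: aborting tells vCenter to discard the
            # partial export instead of publishing it.
            invoke(lease, "HttpNfcLeaseAbort")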
[ 1050.637721] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-7c93397b-8474-44af-a517-25783935255d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.650532] env[69992]: DEBUG oslo_vmware.rw_handles [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520397a3-17da-7cd0-912c-a4ae2686050e/disk-0.vmdk. {{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1050.650630] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Uploaded image b6861dfe-a1bf-4ef6-9649-24057f9f9e98 to the Glance image server {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1050.652918] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Destroying the VM {{(pid=69992) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1050.653225] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1570fdb6-18cf-4a53-b860-fb410f56181a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.660389] env[69992]: DEBUG oslo_vmware.api [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1050.660389] env[69992]: value = "task-2897154" [ 1050.660389] env[69992]: _type = "Task" [ 1050.660389] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.671585] env[69992]: DEBUG oslo_vmware.api [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897154, 'name': Destroy_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.775842] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f488c3dc-0337-490a-b529-b9df1f6a5527 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.785022] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162d701e-1044-40dd-a8a9-9f760e8d7fca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.825265] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df897415-d662-4978-ac9b-e6e891803c4b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.828026] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1050.828240] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1050.828458] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Deleting the datastore file [datastore1] 98cd0eb8-d17a-4a9b-a172-1ba1207168d0 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1050.828742] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d8478cc-b6c8-4ccc-a13e-d6d77ca43e7b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.838182] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f4b0956-e8fd-467f-9ae6-4d106de3768a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.842456] env[69992]: DEBUG oslo_vmware.api [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for the task: (returnval){ [ 1050.842456] env[69992]: value = "task-2897155" [ 1050.842456] env[69992]: _type = "Task" [ 1050.842456] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.854253] env[69992]: DEBUG nova.compute.provider_tree [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1050.862034] env[69992]: DEBUG oslo_vmware.api [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2897155, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.956092] env[69992]: DEBUG oslo_vmware.api [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897150, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.068936] env[69992]: DEBUG nova.compute.manager [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Received event network-changed-0229965f-d491-4e94-9f75-201dda751cd0 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1051.069058] env[69992]: DEBUG nova.compute.manager [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Refreshing instance network info cache due to event network-changed-0229965f-d491-4e94-9f75-201dda751cd0. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1051.069391] env[69992]: DEBUG oslo_concurrency.lockutils [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] Acquiring lock "refresh_cache-e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.069785] env[69992]: DEBUG oslo_concurrency.lockutils [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] Acquired lock "refresh_cache-e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1051.069921] env[69992]: DEBUG nova.network.neutron [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Refreshing network info cache for port 0229965f-d491-4e94-9f75-201dda751cd0 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1051.087180] env[69992]: DEBUG oslo_vmware.api [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897152, 'name': PowerOnVM_Task, 'duration_secs': 0.98619} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.087445] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1051.087652] env[69992]: INFO nova.compute.manager [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Took 8.32 seconds to spawn the instance on the hypervisor. [ 1051.087856] env[69992]: DEBUG nova.compute.manager [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1051.088667] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd34939c-9286-4e32-9c32-130ea57ce110 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.170738] env[69992]: DEBUG oslo_vmware.api [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897154, 'name': Destroy_Task, 'duration_secs': 0.412113} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.171006] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Destroyed the VM [ 1051.171250] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Deleting Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1051.171813] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4105d622-163d-4f8e-9b70-a9f2d3f88e1f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.179990] env[69992]: DEBUG oslo_vmware.api [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1051.179990] env[69992]: value = "task-2897156" [ 1051.179990] env[69992]: _type = "Task" [ 1051.179990] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.189768] env[69992]: DEBUG oslo_vmware.api [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897156, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.247644] env[69992]: DEBUG nova.compute.manager [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1051.275501] env[69992]: DEBUG nova.virt.hardware [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1051.275776] env[69992]: DEBUG nova.virt.hardware [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1051.275958] env[69992]: DEBUG nova.virt.hardware [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1051.276174] env[69992]: DEBUG nova.virt.hardware [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1051.276376] env[69992]: DEBUG nova.virt.hardware [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1051.276566] env[69992]: DEBUG nova.virt.hardware [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1051.277251] env[69992]: DEBUG nova.virt.hardware [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), 
maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1051.277251] env[69992]: DEBUG nova.virt.hardware [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1051.277251] env[69992]: DEBUG nova.virt.hardware [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1051.277491] env[69992]: DEBUG nova.virt.hardware [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1051.277736] env[69992]: DEBUG nova.virt.hardware [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1051.278762] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3416ad-32f4-456d-a97f-db674ffbaca6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.290551] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f1bcce-caae-4e5a-867a-592655093f72 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.352170] env[69992]: DEBUG oslo_vmware.api [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Task: {'id': task-2897155, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192836} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.352452] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1051.352641] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1051.352853] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1051.353063] env[69992]: INFO nova.compute.manager [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Took 1.43 seconds to destroy the instance on the hypervisor. [ 1051.353315] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1051.353509] env[69992]: DEBUG nova.compute.manager [-] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1051.353601] env[69992]: DEBUG nova.network.neutron [-] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1051.357434] env[69992]: DEBUG nova.scheduler.client.report [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1051.445291] env[69992]: DEBUG oslo_vmware.api [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897150, 'name': ReconfigVM_Task, 'duration_secs': 1.449121} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.447715] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1051.447935] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Reconfigured VM to attach interface {{(pid=69992) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1051.610198] env[69992]: INFO nova.compute.manager [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Took 43.79 seconds to build instance. [ 1051.690933] env[69992]: DEBUG oslo_vmware.api [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897156, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.829956] env[69992]: DEBUG nova.network.neutron [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Updated VIF entry in instance network info cache for port 0229965f-d491-4e94-9f75-201dda751cd0. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1051.830417] env[69992]: DEBUG nova.network.neutron [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Updating instance_info_cache with network_info: [{"id": "e64de32e-0e37-4777-91e7-8be0da0fa147", "address": "fa:16:3e:c2:9f:3c", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape64de32e-0e", "ovs_interfaceid": "e64de32e-0e37-4777-91e7-8be0da0fa147", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0229965f-d491-4e94-9f75-201dda751cd0", "address": "fa:16:3e:a0:6c:eb", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0229965f-d4", "ovs_interfaceid": "0229965f-d491-4e94-9f75-201dda751cd0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.862592] env[69992]: DEBUG oslo_concurrency.lockutils [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.643s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.863177] env[69992]: DEBUG nova.compute.manager [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Start building 
networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1051.866330] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.786s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.867771] env[69992]: INFO nova.compute.claims [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1051.955234] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1727b303-fd21-4a70-8398-c6db9864cea8 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "interface-e5d9de80-1ee5-462a-8459-168fd60e1972-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.259s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.019713] env[69992]: DEBUG nova.network.neutron [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Successfully updated port: 3b8fa629-0413-47b5-9a6d-7b64d336638d {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1052.115626] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c6f87c34-557c-4bcc-8745-05f0dae9c906 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "0e8163d9-6ff5-4f1e-af33-ccb42fa46750" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.310s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.191169] env[69992]: DEBUG oslo_vmware.api [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897156, 'name': RemoveSnapshot_Task, 'duration_secs': 0.828154} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.191428] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Deleted Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1052.191655] env[69992]: INFO nova.compute.manager [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Took 16.16 seconds to snapshot the instance on the hypervisor. 
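The "Acquiring lock" / "acquired ... waited" / "released ... held" triplets that recur here (for "compute_resources", the per-instance interface lock, and the refresh_cache locks) come from oslo_concurrency.lockutils, whose inner wrapper times how long the caller waited for and held a named semaphore. A minimal sketch of the same mechanism, with a placeholder function body rather than Nova's real critical section:

    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def instance_claim_example():
        # Runs with the named semaphore held; lockutils' wrapper emits the
        # "acquired ... waited N.NNNs" and "released ... held N.NNNs" DEBUG
        # lines seen throughout this log.
        pass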
[ 1052.194092] env[69992]: DEBUG nova.network.neutron [-] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.334341] env[69992]: DEBUG oslo_concurrency.lockutils [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] Releasing lock "refresh_cache-e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1052.334665] env[69992]: DEBUG nova.compute.manager [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Received event network-changed-e4c81d0e-1575-49d0-98f5-9fd01f35158c {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1052.334774] env[69992]: DEBUG nova.compute.manager [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Refreshing instance network info cache due to event network-changed-e4c81d0e-1575-49d0-98f5-9fd01f35158c. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1052.334992] env[69992]: DEBUG oslo_concurrency.lockutils [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] Acquiring lock "refresh_cache-a06d4b38-0e39-46ef-a588-7627661cb201" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.335151] env[69992]: DEBUG oslo_concurrency.lockutils [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] Acquired lock "refresh_cache-a06d4b38-0e39-46ef-a588-7627661cb201" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1052.335322] env[69992]: DEBUG nova.network.neutron [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Refreshing network info cache for port e4c81d0e-1575-49d0-98f5-9fd01f35158c {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1052.378382] env[69992]: DEBUG nova.compute.utils [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1052.382227] env[69992]: DEBUG nova.compute.manager [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1052.382227] env[69992]: DEBUG nova.network.neutron [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1052.425208] env[69992]: DEBUG nova.policy [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bfb2b2303d6448da9043701c396a2b4c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '02824f4021a5400583cf13cd553207fa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1052.520053] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Acquiring lock "refresh_cache-06442c68-7dc6-46a1-9e35-34a62730a555" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.520218] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Acquired lock "refresh_cache-06442c68-7dc6-46a1-9e35-34a62730a555" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1052.520364] env[69992]: DEBUG nova.network.neutron [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1052.600776] env[69992]: DEBUG nova.compute.manager [req-b18041af-e1aa-426f-8e26-47f20545bb6b req-636a0e8a-28cf-43d6-9ce5-c07876f9734f service nova] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Received event network-vif-deleted-be91de4c-766f-4a66-b07b-2dd3cbe88350 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1052.618657] env[69992]: DEBUG nova.compute.manager [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1052.696749] env[69992]: INFO nova.compute.manager [-] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Took 1.34 seconds to deallocate network for instance. 
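The network_info blobs that update_instance_cache_with_nw_info dumps above (and again further down for ports e4c81d0e and 3b8fa629) share one shape: a list of VIFs, each carrying a port id, a MAC address, and per-subnet fixed IPs that may have floating IPs attached. A short, self-contained helper for reading that shape; the sample is trimmed from the e5d9de80 cache entry above, and the function itself is illustrative, not part of Nova:

    import json

    SAMPLE = json.loads("""
    [{"id": "0229965f-d491-4e94-9f75-201dda751cd0",
      "address": "fa:16:3e:a0:6c:eb",
      "network": {"subnets": [{"cidr": "192.168.128.0/28",
                               "ips": [{"address": "192.168.128.14",
                                        "type": "fixed",
                                        "floating_ips": []}]}]}}]
    """)

    def summarize_vifs(network_info):
        """Return (port_id, mac, fixed_ips, floating_ips) per VIF."""
        rows = []
        for vif in network_info:
            fixed, floating = [], []
            for subnet in vif.get("network", {}).get("subnets", []):
                for ip in subnet.get("ips", []):
                    if ip.get("type") == "fixed":
                        fixed.append(ip["address"])
                    floating.extend(f["address"] for f in ip.get("floating_ips", []))
            rows.append((vif["id"], vif["address"], fixed, floating))
        return rows

    print(summarize_vifs(SAMPLE))
    # [('0229965f-d491-4e94-9f75-201dda751cd0', 'fa:16:3e:a0:6c:eb', ['192.168.128.14'], [])]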
[ 1052.760483] env[69992]: DEBUG nova.compute.manager [None req-97245856-ad0b-4de7-bdcd-bdf6fee93cc6 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Found 2 images (rotation: 2) {{(pid=69992) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1052.842543] env[69992]: DEBUG nova.network.neutron [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Successfully created port: 3479d475-b805-49db-a031-c31a6724c10d {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1052.886132] env[69992]: DEBUG nova.compute.manager [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1053.076172] env[69992]: DEBUG nova.network.neutron [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1053.119221] env[69992]: DEBUG nova.network.neutron [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Updated VIF entry in instance network info cache for port e4c81d0e-1575-49d0-98f5-9fd01f35158c. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1053.119221] env[69992]: DEBUG nova.network.neutron [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Updating instance_info_cache with network_info: [{"id": "e4c81d0e-1575-49d0-98f5-9fd01f35158c", "address": "fa:16:3e:41:02:53", "network": {"id": "daf09f8e-2217-4777-9a09-57c09080946c", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-520713710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bb19b95496548c084be8a8c87b8cd94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4c81d0e-15", "ovs_interfaceid": "e4c81d0e-1575-49d0-98f5-9fd01f35158c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.150883] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.209109] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.227353] env[69992]: DEBUG nova.network.neutron [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Updating instance_info_cache with network_info: [{"id": "3b8fa629-0413-47b5-9a6d-7b64d336638d", "address": "fa:16:3e:ed:42:96", "network": {"id": "21392731-1137-40a9-aef3-11c1bd8395cb", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1774305032-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6555d1831c04485b62e06f8579f389b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "a4f91f31-0516-4d62-a341-e03a50b7c477", "external-id": "nsx-vlan-transportzone-963", "segmentation_id": 963, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b8fa629-04", "ovs_interfaceid": "3b8fa629-0413-47b5-9a6d-7b64d336638d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.415309] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85caba0a-d283-4ff0-9eb3-9a076feb50a9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.427192] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7431b93-61a7-4526-bacb-ad3d3d41f3e7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.461587] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6685cfdb-2c0c-4dc9-a2fb-afe98c745e51 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.468047] env[69992]: DEBUG nova.compute.manager [req-32b9c7dc-0b22-4601-9adb-6db2ce94e8ba req-6bc75508-33d2-4106-a329-e2ff322ddca8 service nova] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Received event network-vif-plugged-3b8fa629-0413-47b5-9a6d-7b64d336638d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1053.468309] env[69992]: DEBUG oslo_concurrency.lockutils [req-32b9c7dc-0b22-4601-9adb-6db2ce94e8ba req-6bc75508-33d2-4106-a329-e2ff322ddca8 service nova] Acquiring lock "06442c68-7dc6-46a1-9e35-34a62730a555-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.468788] env[69992]: DEBUG oslo_concurrency.lockutils [req-32b9c7dc-0b22-4601-9adb-6db2ce94e8ba req-6bc75508-33d2-4106-a329-e2ff322ddca8 service nova] Lock "06442c68-7dc6-46a1-9e35-34a62730a555-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1053.468975] env[69992]: DEBUG oslo_concurrency.lockutils [req-32b9c7dc-0b22-4601-9adb-6db2ce94e8ba req-6bc75508-33d2-4106-a329-e2ff322ddca8 service nova] Lock "06442c68-7dc6-46a1-9e35-34a62730a555-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.469185] env[69992]: DEBUG nova.compute.manager [req-32b9c7dc-0b22-4601-9adb-6db2ce94e8ba req-6bc75508-33d2-4106-a329-e2ff322ddca8 service nova] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] No waiting events found dispatching network-vif-plugged-3b8fa629-0413-47b5-9a6d-7b64d336638d {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1053.469372] env[69992]: WARNING nova.compute.manager [req-32b9c7dc-0b22-4601-9adb-6db2ce94e8ba req-6bc75508-33d2-4106-a329-e2ff322ddca8 service nova] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Received unexpected event 
network-vif-plugged-3b8fa629-0413-47b5-9a6d-7b64d336638d for instance with vm_state building and task_state spawning. [ 1053.469536] env[69992]: DEBUG nova.compute.manager [req-32b9c7dc-0b22-4601-9adb-6db2ce94e8ba req-6bc75508-33d2-4106-a329-e2ff322ddca8 service nova] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Received event network-changed-3b8fa629-0413-47b5-9a6d-7b64d336638d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1053.469814] env[69992]: DEBUG nova.compute.manager [req-32b9c7dc-0b22-4601-9adb-6db2ce94e8ba req-6bc75508-33d2-4106-a329-e2ff322ddca8 service nova] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Refreshing instance network info cache due to event network-changed-3b8fa629-0413-47b5-9a6d-7b64d336638d. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1053.469887] env[69992]: DEBUG oslo_concurrency.lockutils [req-32b9c7dc-0b22-4601-9adb-6db2ce94e8ba req-6bc75508-33d2-4106-a329-e2ff322ddca8 service nova] Acquiring lock "refresh_cache-06442c68-7dc6-46a1-9e35-34a62730a555" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.474172] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f0faa47-1cfb-4cd8-bbe3-5fb69bb85414 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.490106] env[69992]: DEBUG nova.compute.provider_tree [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1053.622332] env[69992]: DEBUG oslo_concurrency.lockutils [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] Releasing lock "refresh_cache-a06d4b38-0e39-46ef-a588-7627661cb201" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1053.622586] env[69992]: DEBUG nova.compute.manager [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Received event network-changed-e4c81d0e-1575-49d0-98f5-9fd01f35158c {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1053.622754] env[69992]: DEBUG nova.compute.manager [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Refreshing instance network info cache due to event network-changed-e4c81d0e-1575-49d0-98f5-9fd01f35158c. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1053.622959] env[69992]: DEBUG oslo_concurrency.lockutils [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] Acquiring lock "refresh_cache-a06d4b38-0e39-46ef-a588-7627661cb201" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.623109] env[69992]: DEBUG oslo_concurrency.lockutils [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] Acquired lock "refresh_cache-a06d4b38-0e39-46ef-a588-7627661cb201" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1053.623268] env[69992]: DEBUG nova.network.neutron [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Refreshing network info cache for port e4c81d0e-1575-49d0-98f5-9fd01f35158c {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1053.731033] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Releasing lock "refresh_cache-06442c68-7dc6-46a1-9e35-34a62730a555" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1053.731033] env[69992]: DEBUG nova.compute.manager [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Instance network_info: |[{"id": "3b8fa629-0413-47b5-9a6d-7b64d336638d", "address": "fa:16:3e:ed:42:96", "network": {"id": "21392731-1137-40a9-aef3-11c1bd8395cb", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1774305032-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6555d1831c04485b62e06f8579f389b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4f91f31-0516-4d62-a341-e03a50b7c477", "external-id": "nsx-vlan-transportzone-963", "segmentation_id": 963, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b8fa629-04", "ovs_interfaceid": "3b8fa629-0413-47b5-9a6d-7b64d336638d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1053.731033] env[69992]: DEBUG oslo_concurrency.lockutils [req-32b9c7dc-0b22-4601-9adb-6db2ce94e8ba req-6bc75508-33d2-4106-a329-e2ff322ddca8 service nova] Acquired lock "refresh_cache-06442c68-7dc6-46a1-9e35-34a62730a555" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1053.732102] env[69992]: DEBUG nova.network.neutron [req-32b9c7dc-0b22-4601-9adb-6db2ce94e8ba req-6bc75508-33d2-4106-a329-e2ff322ddca8 service nova] 
[instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Refreshing network info cache for port 3b8fa629-0413-47b5-9a6d-7b64d336638d {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1053.733779] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:42:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a4f91f31-0516-4d62-a341-e03a50b7c477', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3b8fa629-0413-47b5-9a6d-7b64d336638d', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1053.742044] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Creating folder: Project (d6555d1831c04485b62e06f8579f389b). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1053.745994] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab230b26-4fc5-4d60-959f-2798e46cdd6f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.820024] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Created folder: Project (d6555d1831c04485b62e06f8579f389b) in parent group-v581821. [ 1053.820934] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Creating folder: Instances. Parent ref: group-v581975. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1053.821410] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-623e7b5c-400f-4ba4-b33e-a5bfad2526b4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.833238] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Created folder: Instances in parent group-v581975. [ 1053.833506] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1053.833708] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1053.833931] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f7cdba4-7246-4659-87ac-cd1feb3405f9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.858351] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1053.858351] env[69992]: value = "task-2897159" [ 1053.858351] env[69992]: _type = "Task" [ 1053.858351] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.868103] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897159, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.898617] env[69992]: DEBUG nova.compute.manager [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1053.948882] env[69992]: DEBUG nova.virt.hardware [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1053.949196] env[69992]: DEBUG nova.virt.hardware [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1053.949369] env[69992]: DEBUG nova.virt.hardware [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1053.949590] env[69992]: DEBUG nova.virt.hardware [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1053.949786] env[69992]: DEBUG nova.virt.hardware [None 
req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1053.949983] env[69992]: DEBUG nova.virt.hardware [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1053.950508] env[69992]: DEBUG nova.virt.hardware [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1053.950771] env[69992]: DEBUG nova.virt.hardware [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1053.950997] env[69992]: DEBUG nova.virt.hardware [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1053.951196] env[69992]: DEBUG nova.virt.hardware [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1053.951377] env[69992]: DEBUG nova.virt.hardware [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1053.953305] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4adbea19-dcfa-40cb-ac84-16abbb115f1a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.963282] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789c1054-31fb-4e45-ae8e-36f7b7471f68 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.993150] env[69992]: DEBUG nova.scheduler.client.report [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1054.067283] env[69992]: DEBUG nova.network.neutron [req-32b9c7dc-0b22-4601-9adb-6db2ce94e8ba req-6bc75508-33d2-4106-a329-e2ff322ddca8 service nova] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Updated VIF entry in instance network info cache for port 3b8fa629-0413-47b5-9a6d-7b64d336638d. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1054.071051] env[69992]: DEBUG nova.network.neutron [req-32b9c7dc-0b22-4601-9adb-6db2ce94e8ba req-6bc75508-33d2-4106-a329-e2ff322ddca8 service nova] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Updating instance_info_cache with network_info: [{"id": "3b8fa629-0413-47b5-9a6d-7b64d336638d", "address": "fa:16:3e:ed:42:96", "network": {"id": "21392731-1137-40a9-aef3-11c1bd8395cb", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1774305032-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6555d1831c04485b62e06f8579f389b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4f91f31-0516-4d62-a341-e03a50b7c477", "external-id": "nsx-vlan-transportzone-963", "segmentation_id": 963, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b8fa629-04", "ovs_interfaceid": "3b8fa629-0413-47b5-9a6d-7b64d336638d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1054.365547] env[69992]: DEBUG nova.network.neutron [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Updated VIF entry in instance network info cache for port e4c81d0e-1575-49d0-98f5-9fd01f35158c. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1054.365902] env[69992]: DEBUG nova.network.neutron [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Updating instance_info_cache with network_info: [{"id": "e4c81d0e-1575-49d0-98f5-9fd01f35158c", "address": "fa:16:3e:41:02:53", "network": {"id": "daf09f8e-2217-4777-9a09-57c09080946c", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-520713710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bb19b95496548c084be8a8c87b8cd94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4c81d0e-15", "ovs_interfaceid": "e4c81d0e-1575-49d0-98f5-9fd01f35158c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1054.370748] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897159, 'name': CreateVM_Task, 'duration_secs': 0.390831} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.370991] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1054.371457] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.371706] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1054.372165] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1054.372436] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9069cbe-3190-45c5-95f4-1efe9f968cd2 {{(pid=69992) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.378096] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Waiting for the task: (returnval){ [ 1054.378096] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]526fd803-b053-5bdb-ee2c-1d922b0ff4d6" [ 1054.378096] env[69992]: _type = "Task" [ 1054.378096] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.387724] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526fd803-b053-5bdb-ee2c-1d922b0ff4d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.454553] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.454820] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.455044] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.455237] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.455410] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.463509] env[69992]: INFO nova.compute.manager [None 
req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Terminating instance [ 1054.498323] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.632s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.498737] env[69992]: DEBUG nova.compute.manager [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1054.501290] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.280s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.501518] env[69992]: DEBUG nova.objects.instance [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lazy-loading 'resources' on Instance uuid 62936d27-5405-4d29-b3ff-c4d8a74ba440 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1054.573100] env[69992]: DEBUG oslo_concurrency.lockutils [req-32b9c7dc-0b22-4601-9adb-6db2ce94e8ba req-6bc75508-33d2-4106-a329-e2ff322ddca8 service nova] Releasing lock "refresh_cache-06442c68-7dc6-46a1-9e35-34a62730a555" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1054.641044] env[69992]: DEBUG nova.compute.manager [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1054.641044] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c582c694-3d8e-410d-b5d6-80f484a43b10 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.697374] env[69992]: DEBUG nova.network.neutron [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Successfully updated port: 3479d475-b805-49db-a031-c31a6724c10d {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1054.872155] env[69992]: DEBUG oslo_concurrency.lockutils [req-7b0a34f0-0493-47a0-ba8d-efe69a4cb762 req-93202b92-d7ed-4fa7-ab9c-1e0a40fd1c70 service nova] Releasing lock "refresh_cache-a06d4b38-0e39-46ef-a588-7627661cb201" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1054.905249] env[69992]: DEBUG oslo_vmware.api [None 
req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526fd803-b053-5bdb-ee2c-1d922b0ff4d6, 'name': SearchDatastore_Task, 'duration_secs': 0.010508} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.905489] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1054.906086] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1054.906086] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.906086] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1054.906281] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1054.906517] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-146267e6-1445-40f1-a7e2-9fe98fb60f90 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.917477] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1054.917625] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1054.918379] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa6f5926-f5ec-4916-b15d-5bac603273b6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.926056] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Waiting for the task: (returnval){ [ 1054.926056] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52c6b7fd-8700-b2a6-e901-b550f5ec794e" [ 1054.926056] env[69992]: _type = "Task" [ 1054.926056] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.934717] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c6b7fd-8700-b2a6-e901-b550f5ec794e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.964804] env[69992]: DEBUG nova.compute.manager [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1054.965061] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1054.965970] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef45e1e2-85ba-4a14-9ca4-4bf7a94a5195 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.974915] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1054.975190] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98a6ee24-2ce4-4a72-85a6-ce72db3ed22f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.984017] env[69992]: DEBUG oslo_vmware.api [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 1054.984017] env[69992]: value = "task-2897160" [ 1054.984017] env[69992]: _type = "Task" [ 1054.984017] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.993583] env[69992]: DEBUG oslo_vmware.api [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897160, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.007556] env[69992]: DEBUG nova.compute.utils [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1055.009193] env[69992]: DEBUG nova.compute.manager [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1055.009360] env[69992]: DEBUG nova.network.neutron [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1055.045880] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "interface-e5d9de80-1ee5-462a-8459-168fd60e1972-0229965f-d491-4e94-9f75-201dda751cd0" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.045880] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "interface-e5d9de80-1ee5-462a-8459-168fd60e1972-0229965f-d491-4e94-9f75-201dda751cd0" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.096053] env[69992]: DEBUG nova.compute.manager [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Stashing vm_state: active {{(pid=69992) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1055.116263] env[69992]: DEBUG nova.policy [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ed40174f9f948b8851e01291f778049', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0183165a87814cc28924a3bf6c07f171', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 
'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1055.152674] env[69992]: INFO nova.compute.manager [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] instance snapshotting [ 1055.153175] env[69992]: DEBUG nova.objects.instance [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lazy-loading 'flavor' on Instance uuid a7f01cd7-f148-48fc-a71a-5461672d6039 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.201162] env[69992]: DEBUG oslo_concurrency.lockutils [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "refresh_cache-673be00f-e3c5-4a54-beeb-cf89828e9e32" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.201354] env[69992]: DEBUG oslo_concurrency.lockutils [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquired lock "refresh_cache-673be00f-e3c5-4a54-beeb-cf89828e9e32" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1055.201458] env[69992]: DEBUG nova.network.neutron [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1055.390504] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.390746] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.390955] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.391161] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.391334] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.397876] env[69992]: INFO nova.compute.manager [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Terminating instance [ 1055.438131] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c6b7fd-8700-b2a6-e901-b550f5ec794e, 'name': SearchDatastore_Task, 'duration_secs': 0.01093} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.438995] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e821cbf3-74fa-4d46-8f08-176eb3489e9c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.451282] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Waiting for the task: (returnval){ [ 1055.451282] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a31abb-b66d-79c3-8737-eb6754622a4a" [ 1055.451282] env[69992]: _type = "Task" [ 1055.451282] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.461481] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a31abb-b66d-79c3-8737-eb6754622a4a, 'name': SearchDatastore_Task, 'duration_secs': 0.011262} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.461925] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1055.462276] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 06442c68-7dc6-46a1-9e35-34a62730a555/06442c68-7dc6-46a1-9e35-34a62730a555.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1055.462654] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0c2862b6-256b-4eb5-a1bf-feaafb9f8a7d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.475379] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Waiting for the task: (returnval){ [ 1055.475379] env[69992]: value = "task-2897161" [ 1055.475379] env[69992]: _type = "Task" [ 1055.475379] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.490983] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Task: {'id': task-2897161, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.497620] env[69992]: DEBUG oslo_vmware.api [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897160, 'name': PowerOffVM_Task, 'duration_secs': 0.247293} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.497906] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1055.498568] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1055.502019] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-30e2f9b4-d6a1-473b-9203-a0d207d9913c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.512118] env[69992]: DEBUG nova.compute.manager [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1055.547249] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.547249] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1055.548456] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c9c0e3-dd8f-422d-87c7-3a826b0382bf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.584827] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3780e3d-479c-46ed-a5b4-14ffe14dafd9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.587829] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1055.591114] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 
1055.591397] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Deleting the datastore file [datastore2] f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1055.591897] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ab44d27d-f386-49e5-936e-764eb0a7d40e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.623807] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Reconfiguring VM to detach interface {{(pid=69992) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1055.627923] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-675cb6c8-6cb3-4ce0-b7da-4ca3aacbf916 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.640909] env[69992]: DEBUG oslo_vmware.api [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 1055.640909] env[69992]: value = "task-2897163" [ 1055.640909] env[69992]: _type = "Task" [ 1055.640909] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.641967] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.650108] env[69992]: DEBUG oslo_vmware.api [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1055.650108] env[69992]: value = "task-2897164" [ 1055.650108] env[69992]: _type = "Task" [ 1055.650108] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.653681] env[69992]: DEBUG oslo_vmware.api [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897163, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.660659] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-021a1f31-dd4a-4403-a7f3-94f234c203c7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.664259] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a61622-a4da-4591-852a-7126c5101c4d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.672734] env[69992]: DEBUG oslo_vmware.api [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897164, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.691788] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f052fea-5fe0-41aa-86be-a74b0afae14c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.696169] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1eddf9a-f0e5-4d8c-ada5-483bc11ae78f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.702199] env[69992]: DEBUG nova.compute.manager [req-5a25be7a-5f32-4cda-8af8-b619d4f0febc req-f5513aef-b9fc-4866-961b-8c8b07ffdbaf service nova] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Received event network-vif-plugged-3479d475-b805-49db-a031-c31a6724c10d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1055.702435] env[69992]: DEBUG oslo_concurrency.lockutils [req-5a25be7a-5f32-4cda-8af8-b619d4f0febc req-f5513aef-b9fc-4866-961b-8c8b07ffdbaf service nova] Acquiring lock "673be00f-e3c5-4a54-beeb-cf89828e9e32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.702672] env[69992]: DEBUG oslo_concurrency.lockutils [req-5a25be7a-5f32-4cda-8af8-b619d4f0febc req-f5513aef-b9fc-4866-961b-8c8b07ffdbaf service nova] Lock "673be00f-e3c5-4a54-beeb-cf89828e9e32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.702829] env[69992]: DEBUG oslo_concurrency.lockutils [req-5a25be7a-5f32-4cda-8af8-b619d4f0febc req-f5513aef-b9fc-4866-961b-8c8b07ffdbaf service nova] Lock "673be00f-e3c5-4a54-beeb-cf89828e9e32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.702997] env[69992]: DEBUG nova.compute.manager [req-5a25be7a-5f32-4cda-8af8-b619d4f0febc req-f5513aef-b9fc-4866-961b-8c8b07ffdbaf service nova] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] No waiting events found dispatching network-vif-plugged-3479d475-b805-49db-a031-c31a6724c10d {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1055.703176] env[69992]: WARNING 
nova.compute.manager [req-5a25be7a-5f32-4cda-8af8-b619d4f0febc req-f5513aef-b9fc-4866-961b-8c8b07ffdbaf service nova] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Received unexpected event network-vif-plugged-3479d475-b805-49db-a031-c31a6724c10d for instance with vm_state building and task_state spawning. [ 1055.703335] env[69992]: DEBUG nova.compute.manager [req-5a25be7a-5f32-4cda-8af8-b619d4f0febc req-f5513aef-b9fc-4866-961b-8c8b07ffdbaf service nova] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Received event network-changed-3479d475-b805-49db-a031-c31a6724c10d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1055.703489] env[69992]: DEBUG nova.compute.manager [req-5a25be7a-5f32-4cda-8af8-b619d4f0febc req-f5513aef-b9fc-4866-961b-8c8b07ffdbaf service nova] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Refreshing instance network info cache due to event network-changed-3479d475-b805-49db-a031-c31a6724c10d. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1055.703654] env[69992]: DEBUG oslo_concurrency.lockutils [req-5a25be7a-5f32-4cda-8af8-b619d4f0febc req-f5513aef-b9fc-4866-961b-8c8b07ffdbaf service nova] Acquiring lock "refresh_cache-673be00f-e3c5-4a54-beeb-cf89828e9e32" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.704550] env[69992]: DEBUG nova.network.neutron [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Successfully created port: 66d406e0-6f68-43e7-ab80-d030bf95c7bb {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1055.757651] env[69992]: DEBUG nova.network.neutron [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1055.760898] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c854b519-0bc4-457b-be9b-e3a5fe8d4218 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.773387] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc9740b6-2799-4ecb-b439-b806aeb0d1fe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.790450] env[69992]: DEBUG nova.compute.provider_tree [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1055.905215] env[69992]: DEBUG nova.compute.manager [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1055.905488] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1055.906436] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d3566b-7bb3-4af1-bbed-bcc47a0b7eca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.915115] env[69992]: DEBUG nova.network.neutron [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Updating instance_info_cache with network_info: [{"id": "3479d475-b805-49db-a031-c31a6724c10d", "address": "fa:16:3e:db:b1:08", "network": {"id": "918ab136-b380-4ccd-b218-738aac4652fa", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1944921913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02824f4021a5400583cf13cd553207fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac2c9d07-ed01-47a9-88f1-562992bc1076", "external-id": "nsx-vlan-transportzone-968", "segmentation_id": 968, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3479d475-b8", "ovs_interfaceid": "3479d475-b805-49db-a031-c31a6724c10d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.919635] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1055.920225] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7fcf2947-e57e-45ee-8916-701e97be296b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.929090] env[69992]: DEBUG oslo_vmware.api [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 1055.929090] env[69992]: value = "task-2897165" [ 1055.929090] env[69992]: _type = "Task" [ 1055.929090] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.939384] env[69992]: DEBUG oslo_vmware.api [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897165, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.989786] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Task: {'id': task-2897161, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.161340] env[69992]: DEBUG oslo_vmware.api [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897163, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.445295} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.162210] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1056.162413] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1056.162621] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1056.162876] env[69992]: INFO nova.compute.manager [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1056.163155] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1056.163358] env[69992]: DEBUG nova.compute.manager [-] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1056.163445] env[69992]: DEBUG nova.network.neutron [-] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1056.168778] env[69992]: DEBUG oslo_vmware.api [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897164, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.262253] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Creating Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1056.262583] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-32fc61d9-c910-40b0-a317-ec6d1b292103 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.272047] env[69992]: DEBUG oslo_vmware.api [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1056.272047] env[69992]: value = "task-2897166" [ 1056.272047] env[69992]: _type = "Task" [ 1056.272047] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.282483] env[69992]: DEBUG oslo_vmware.api [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897166, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.293530] env[69992]: DEBUG nova.scheduler.client.report [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1056.421766] env[69992]: DEBUG oslo_concurrency.lockutils [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Releasing lock "refresh_cache-673be00f-e3c5-4a54-beeb-cf89828e9e32" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1056.422033] env[69992]: DEBUG nova.compute.manager [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Instance network_info: |[{"id": "3479d475-b805-49db-a031-c31a6724c10d", "address": "fa:16:3e:db:b1:08", "network": {"id": "918ab136-b380-4ccd-b218-738aac4652fa", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1944921913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02824f4021a5400583cf13cd553207fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac2c9d07-ed01-47a9-88f1-562992bc1076", "external-id": "nsx-vlan-transportzone-968", "segmentation_id": 968, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3479d475-b8", "ovs_interfaceid": "3479d475-b805-49db-a031-c31a6724c10d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1056.422435] env[69992]: DEBUG oslo_concurrency.lockutils [req-5a25be7a-5f32-4cda-8af8-b619d4f0febc req-f5513aef-b9fc-4866-961b-8c8b07ffdbaf service nova] Acquired lock "refresh_cache-673be00f-e3c5-4a54-beeb-cf89828e9e32" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1056.422601] env[69992]: DEBUG nova.network.neutron [req-5a25be7a-5f32-4cda-8af8-b619d4f0febc req-f5513aef-b9fc-4866-961b-8c8b07ffdbaf service nova] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Refreshing network info cache for port 3479d475-b805-49db-a031-c31a6724c10d {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1056.423926] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-220ce353-a9f3-455d-a30f-c89a8902c63e 
tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:b1:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac2c9d07-ed01-47a9-88f1-562992bc1076', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3479d475-b805-49db-a031-c31a6724c10d', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1056.431868] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1056.432460] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1056.435856] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-56a3b8e2-40d3-4653-9fe3-fc4eaf89e152 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.460833] env[69992]: DEBUG oslo_vmware.api [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897165, 'name': PowerOffVM_Task, 'duration_secs': 0.287189} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.462218] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1056.462450] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1056.463239] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1056.463239] env[69992]: value = "task-2897167" [ 1056.463239] env[69992]: _type = "Task" [ 1056.463239] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.464050] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1d3241b9-6a03-4573-a982-9d45ae93f692 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.477445] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897167, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.486189] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Task: {'id': task-2897161, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.598844} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.486758] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 06442c68-7dc6-46a1-9e35-34a62730a555/06442c68-7dc6-46a1-9e35-34a62730a555.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1056.486758] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1056.486955] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-09711065-d230-4324-b093-42dfb3e6b877 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.493911] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Waiting for the task: (returnval){ [ 1056.493911] env[69992]: value = "task-2897169" [ 1056.493911] env[69992]: _type = "Task" [ 1056.493911] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.505725] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Task: {'id': task-2897169, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.524340] env[69992]: DEBUG nova.compute.manager [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1056.554203] env[69992]: DEBUG nova.virt.hardware [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1056.554490] env[69992]: DEBUG nova.virt.hardware [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1056.554659] env[69992]: DEBUG nova.virt.hardware [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1056.554846] env[69992]: DEBUG nova.virt.hardware [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1056.554996] env[69992]: DEBUG nova.virt.hardware [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1056.555175] env[69992]: DEBUG nova.virt.hardware [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1056.555392] env[69992]: DEBUG nova.virt.hardware [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1056.555555] env[69992]: DEBUG nova.virt.hardware [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1056.555731] env[69992]: DEBUG 
nova.virt.hardware [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1056.555926] env[69992]: DEBUG nova.virt.hardware [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1056.556142] env[69992]: DEBUG nova.virt.hardware [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1056.556481] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1056.556665] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1056.556839] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Deleting the datastore file [datastore2] 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1056.557673] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f63148-89f9-43a2-a117-af350353f3dd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.560351] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f52198c7-c09d-46ec-b148-0bdbfef4977d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.568582] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d207a9-8c53-4a22-92b6-5c961c1b812e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.573646] env[69992]: DEBUG oslo_vmware.api [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for the task: (returnval){ [ 1056.573646] env[69992]: value = "task-2897170" [ 1056.573646] env[69992]: _type = "Task" [ 1056.573646] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.590618] env[69992]: DEBUG oslo_vmware.api [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897170, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.663926] env[69992]: DEBUG oslo_vmware.api [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897164, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.695664] env[69992]: DEBUG nova.compute.manager [req-aab816f0-864f-4270-9ab0-04eef6c2b400 req-284ef600-26ed-4a9d-8498-1da7e5284991 service nova] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Received event network-vif-deleted-bd5193f4-aa70-4668-af0b-696f84cf0080 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1056.695664] env[69992]: INFO nova.compute.manager [req-aab816f0-864f-4270-9ab0-04eef6c2b400 req-284ef600-26ed-4a9d-8498-1da7e5284991 service nova] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Neutron deleted interface bd5193f4-aa70-4668-af0b-696f84cf0080; detaching it from the instance and deleting it from the info cache [ 1056.695664] env[69992]: DEBUG nova.network.neutron [req-aab816f0-864f-4270-9ab0-04eef6c2b400 req-284ef600-26ed-4a9d-8498-1da7e5284991 service nova] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.783502] env[69992]: DEBUG oslo_vmware.api [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897166, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.798300] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.297s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1056.801069] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.685s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.802637] env[69992]: INFO nova.compute.claims [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1056.825657] env[69992]: INFO nova.scheduler.client.report [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Deleted allocations for instance 62936d27-5405-4d29-b3ff-c4d8a74ba440 [ 1056.955024] env[69992]: DEBUG nova.network.neutron [-] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.978911] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897167, 'name': CreateVM_Task, 'duration_secs': 0.370122} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.979578] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1056.980074] env[69992]: DEBUG oslo_concurrency.lockutils [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.980258] env[69992]: DEBUG oslo_concurrency.lockutils [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1056.980590] env[69992]: DEBUG oslo_concurrency.lockutils [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1056.980864] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba1eab40-19c6-4107-a14e-075112c8dc8e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.986175] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1056.986175] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a487fc-7947-3e55-8c8b-c7583dcbf47a" [ 1056.986175] env[69992]: _type = "Task" [ 1056.986175] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.995860] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a487fc-7947-3e55-8c8b-c7583dcbf47a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.004122] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Task: {'id': task-2897169, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105903} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.004407] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1057.005205] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e82c9a8-2154-4e05-bde1-9907a8983b41 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.030357] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] 06442c68-7dc6-46a1-9e35-34a62730a555/06442c68-7dc6-46a1-9e35-34a62730a555.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1057.030847] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40722dc9-1201-4161-8661-14d89296c7f7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.059612] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Waiting for the task: (returnval){ [ 1057.059612] env[69992]: value = "task-2897171" [ 1057.059612] env[69992]: _type = "Task" [ 1057.059612] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.071573] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Task: {'id': task-2897171, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.088834] env[69992]: DEBUG oslo_vmware.api [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Task: {'id': task-2897170, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206271} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.089133] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1057.089329] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1057.089712] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1057.089712] env[69992]: INFO nova.compute.manager [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1057.089943] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1057.090164] env[69992]: DEBUG nova.compute.manager [-] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1057.090254] env[69992]: DEBUG nova.network.neutron [-] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1057.166874] env[69992]: DEBUG oslo_vmware.api [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897164, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.200389] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e0ecbda7-deba-42dc-91ba-9b4f7fc450b9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.212732] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1960500c-896f-406a-8009-f984726e5435 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.254123] env[69992]: DEBUG nova.compute.manager [req-aab816f0-864f-4270-9ab0-04eef6c2b400 req-284ef600-26ed-4a9d-8498-1da7e5284991 service nova] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Detach interface failed, port_id=bd5193f4-aa70-4668-af0b-696f84cf0080, reason: Instance f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75 could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1057.255304] env[69992]: DEBUG nova.network.neutron [req-5a25be7a-5f32-4cda-8af8-b619d4f0febc req-f5513aef-b9fc-4866-961b-8c8b07ffdbaf service nova] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Updated VIF entry in instance network info cache for port 3479d475-b805-49db-a031-c31a6724c10d. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1057.255636] env[69992]: DEBUG nova.network.neutron [req-5a25be7a-5f32-4cda-8af8-b619d4f0febc req-f5513aef-b9fc-4866-961b-8c8b07ffdbaf service nova] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Updating instance_info_cache with network_info: [{"id": "3479d475-b805-49db-a031-c31a6724c10d", "address": "fa:16:3e:db:b1:08", "network": {"id": "918ab136-b380-4ccd-b218-738aac4652fa", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1944921913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02824f4021a5400583cf13cd553207fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac2c9d07-ed01-47a9-88f1-562992bc1076", "external-id": "nsx-vlan-transportzone-968", "segmentation_id": 968, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3479d475-b8", "ovs_interfaceid": "3479d475-b805-49db-a031-c31a6724c10d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.283264] env[69992]: DEBUG oslo_vmware.api [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897166, 'name': CreateSnapshot_Task, 'duration_secs': 0.809837} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.283523] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Created Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1057.284326] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6608304-b91a-4984-a32f-c792d303de70 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.336886] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b27052e0-2045-46d5-97c2-1b3c272eb80c tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "62936d27-5405-4d29-b3ff-c4d8a74ba440" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.943s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.458184] env[69992]: INFO nova.compute.manager [-] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Took 1.29 seconds to deallocate network for instance. [ 1057.497655] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a487fc-7947-3e55-8c8b-c7583dcbf47a, 'name': SearchDatastore_Task, 'duration_secs': 0.037542} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.497999] env[69992]: DEBUG oslo_concurrency.lockutils [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1057.498207] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1057.498443] env[69992]: DEBUG oslo_concurrency.lockutils [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.498593] env[69992]: DEBUG oslo_concurrency.lockutils [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1057.499065] env[69992]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1057.499360] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cbdf71a9-7424-4ae5-9edf-d2278f4f5e79 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.508847] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1057.509039] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1057.509745] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-387cb86d-fab2-4a44-a140-988750a80297 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.516474] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1057.516474] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]524e6777-d543-c7b7-5122-4923002f0836" [ 1057.516474] env[69992]: _type = "Task" [ 1057.516474] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.524215] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524e6777-d543-c7b7-5122-4923002f0836, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.571484] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Task: {'id': task-2897171, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.666021] env[69992]: DEBUG oslo_vmware.api [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897164, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.758134] env[69992]: DEBUG oslo_concurrency.lockutils [req-5a25be7a-5f32-4cda-8af8-b619d4f0febc req-f5513aef-b9fc-4866-961b-8c8b07ffdbaf service nova] Releasing lock "refresh_cache-673be00f-e3c5-4a54-beeb-cf89828e9e32" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1057.803034] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Creating linked-clone VM from snapshot {{(pid=69992) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1057.803367] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9d6c5ad1-5d5c-48de-b558-f183bdb8a402 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.813546] env[69992]: DEBUG oslo_vmware.api [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1057.813546] env[69992]: value = "task-2897172" [ 1057.813546] env[69992]: _type = "Task" [ 1057.813546] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.822694] env[69992]: DEBUG oslo_vmware.api [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897172, 'name': CloneVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.854946] env[69992]: DEBUG nova.network.neutron [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Successfully updated port: 66d406e0-6f68-43e7-ab80-d030bf95c7bb {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1057.885746] env[69992]: DEBUG nova.network.neutron [-] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.967119] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.029387] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524e6777-d543-c7b7-5122-4923002f0836, 'name': SearchDatastore_Task, 'duration_secs': 0.010339} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.032962] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96ad4601-5985-4e7a-822d-a1f80e7c8350 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.039589] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1058.039589] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52eaaf81-dc7d-7127-57f1-60befd6e254f" [ 1058.039589] env[69992]: _type = "Task" [ 1058.039589] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.052662] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52eaaf81-dc7d-7127-57f1-60befd6e254f, 'name': SearchDatastore_Task, 'duration_secs': 0.010645} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.052842] env[69992]: DEBUG oslo_concurrency.lockutils [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1058.053167] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 673be00f-e3c5-4a54-beeb-cf89828e9e32/673be00f-e3c5-4a54-beeb-cf89828e9e32.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1058.053465] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e459ccf9-37e7-4b08-a00c-1f00a959bccc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.062694] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1058.062694] env[69992]: value = "task-2897173" [ 1058.062694] env[69992]: _type = "Task" [ 1058.062694] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.081663] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897173, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.082013] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Task: {'id': task-2897171, 'name': ReconfigVM_Task, 'duration_secs': 0.682373} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.083164] env[69992]: DEBUG nova.compute.manager [req-6a0b516d-5a31-47ab-bcda-58797c7ff28d req-da682182-4d67-4bc3-a1aa-661cd700786f service nova] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Received event network-vif-plugged-66d406e0-6f68-43e7-ab80-d030bf95c7bb {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1058.083435] env[69992]: DEBUG oslo_concurrency.lockutils [req-6a0b516d-5a31-47ab-bcda-58797c7ff28d req-da682182-4d67-4bc3-a1aa-661cd700786f service nova] Acquiring lock "efa06ccc-be20-4d0e-938f-01c91ef4de8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.083635] env[69992]: DEBUG oslo_concurrency.lockutils [req-6a0b516d-5a31-47ab-bcda-58797c7ff28d req-da682182-4d67-4bc3-a1aa-661cd700786f service nova] Lock "efa06ccc-be20-4d0e-938f-01c91ef4de8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.083829] env[69992]: DEBUG oslo_concurrency.lockutils [req-6a0b516d-5a31-47ab-bcda-58797c7ff28d req-da682182-4d67-4bc3-a1aa-661cd700786f service nova] Lock "efa06ccc-be20-4d0e-938f-01c91ef4de8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.084047] env[69992]: DEBUG nova.compute.manager [req-6a0b516d-5a31-47ab-bcda-58797c7ff28d req-da682182-4d67-4bc3-a1aa-661cd700786f service nova] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] No waiting events found dispatching network-vif-plugged-66d406e0-6f68-43e7-ab80-d030bf95c7bb {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1058.084247] env[69992]: WARNING nova.compute.manager [req-6a0b516d-5a31-47ab-bcda-58797c7ff28d req-da682182-4d67-4bc3-a1aa-661cd700786f service nova] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Received unexpected event network-vif-plugged-66d406e0-6f68-43e7-ab80-d030bf95c7bb for instance with vm_state building and task_state spawning. 
[ 1058.084631] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Reconfigured VM instance instance-00000033 to attach disk [datastore2] 06442c68-7dc6-46a1-9e35-34a62730a555/06442c68-7dc6-46a1-9e35-34a62730a555.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1058.085694] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-71ae2792-791a-42f6-9ae0-cc281372f938 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.097908] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Waiting for the task: (returnval){ [ 1058.097908] env[69992]: value = "task-2897174" [ 1058.097908] env[69992]: _type = "Task" [ 1058.097908] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.110676] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Task: {'id': task-2897174, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.168666] env[69992]: DEBUG oslo_vmware.api [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897164, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.325359] env[69992]: DEBUG oslo_vmware.api [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897172, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.357771] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Acquiring lock "refresh_cache-efa06ccc-be20-4d0e-938f-01c91ef4de8e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.357771] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Acquired lock "refresh_cache-efa06ccc-be20-4d0e-938f-01c91ef4de8e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.357936] env[69992]: DEBUG nova.network.neutron [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1058.381949] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53613a80-3549-484e-93e3-5bc8ecc95a4c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.390726] env[69992]: INFO nova.compute.manager [-] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Took 1.30 seconds to deallocate network for instance. [ 1058.393507] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c564304c-9f9e-46b1-a4f9-7fcfa2ba8e73 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.432755] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16f89d9-2873-455b-bb39-34e53c0f435f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.445484] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee87721-035d-4064-8944-3a5b745e7a39 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.460372] env[69992]: DEBUG nova.compute.provider_tree [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1058.577799] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897173, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509919} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.577799] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 673be00f-e3c5-4a54-beeb-cf89828e9e32/673be00f-e3c5-4a54-beeb-cf89828e9e32.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1058.578306] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1058.578306] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d112248e-f95a-45ec-9a7c-747cf04ed56f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.585520] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1058.585520] env[69992]: value = "task-2897175" [ 1058.585520] env[69992]: _type = "Task" [ 1058.585520] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.595713] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897175, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.609273] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Task: {'id': task-2897174, 'name': Rename_Task, 'duration_secs': 0.153646} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.609570] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1058.609829] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-68a308bb-ec88-4909-adcc-e3e116ec545b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.620452] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Waiting for the task: (returnval){ [ 1058.620452] env[69992]: value = "task-2897176" [ 1058.620452] env[69992]: _type = "Task" [ 1058.620452] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.629766] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Task: {'id': task-2897176, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.674025] env[69992]: DEBUG oslo_vmware.api [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897164, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.827030] env[69992]: DEBUG oslo_vmware.api [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897172, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.903400] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.904791] env[69992]: DEBUG nova.network.neutron [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1058.962998] env[69992]: DEBUG nova.scheduler.client.report [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1059.103471] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897175, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064681} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.103471] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1059.103471] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce113bd-b7d3-4dd5-9b78-da356966d199 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.135239] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] 673be00f-e3c5-4a54-beeb-cf89828e9e32/673be00f-e3c5-4a54-beeb-cf89828e9e32.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1059.142751] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-658399ec-d567-4b34-a801-f7423e72d950 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.178402] env[69992]: DEBUG nova.compute.manager [req-3fe31fa8-9f84-4633-b8d5-bfbc3ac3e93f req-8e1e917f-e095-48fa-932c-10aac6b2dd78 service nova] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Received event network-vif-deleted-3bb69960-1f9d-420f-957d-a590b9e5bd9d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1059.192511] env[69992]: DEBUG oslo_vmware.api [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Task: {'id': task-2897176, 'name': PowerOnVM_Task, 'duration_secs': 0.541225} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.194845] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1059.195176] env[69992]: INFO nova.compute.manager [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Took 7.95 seconds to spawn the instance on the hypervisor. 
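A note on the pattern visible in the entries above: each spawn step (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, the ReconfigVM_Task that attaches the VMDK, Rename_Task, PowerOnVM_Task) is a vSphere task that the driver starts and then polls until completion, which is what produces the paired "Waiting for the task ... to complete" and "progress is N%" entries. The sketch below shows that invoke-and-poll pattern in minimal form; it assumes `session` is an already-established oslo_vmware.api.VMwareAPISession and `vm_ref` is a VirtualMachine managed-object reference obtained elsewhere (both placeholders), and it is a simplification of the helpers in nova/virt/vmwareapi/vm_util.py rather than the driver's actual code.

def power_on_vm(session, vm_ref):
    """Start PowerOnVM_Task and block until vCenter reports completion."""
    # invoke_api() issues the SOAP call and returns a Task moref immediately;
    # this corresponds to the "Invoking VirtualMachine.PowerOnVM_Task" entry.
    task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    # wait_for_task() polls the task on an interval (the "progress is N%"
    # entries) and returns the final task info; a failed task raises an
    # oslo_vmware exception instead of returning.
    return session.wait_for_task(task)

The same two calls, with a different method name and arguments, sit behind every task id seen in this section (ReconfigVM_Task, Rename_Task, CreateVM_Task, CloneVM_Task, and so on).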
[ 1059.195454] env[69992]: DEBUG nova.compute.manager [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1059.195949] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1059.195949] env[69992]: value = "task-2897177" [ 1059.195949] env[69992]: _type = "Task" [ 1059.195949] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.197013] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ee479d-a4ae-4253-a35f-15114b6b2163 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.211274] env[69992]: DEBUG oslo_vmware.api [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897164, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.221782] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897177, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.252083] env[69992]: DEBUG nova.network.neutron [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Updating instance_info_cache with network_info: [{"id": "66d406e0-6f68-43e7-ab80-d030bf95c7bb", "address": "fa:16:3e:13:fa:bc", "network": {"id": "e41a3bc7-387a-4d51-b682-2bae9aa6bab0", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-579698461-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0183165a87814cc28924a3bf6c07f171", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a58387dd-f438-4913-af6a-fafb734cd881", "external-id": "nsx-vlan-transportzone-169", "segmentation_id": 169, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66d406e0-6f", "ovs_interfaceid": "66d406e0-6f68-43e7-ab80-d030bf95c7bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.325633] env[69992]: DEBUG oslo_vmware.api [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 
tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897172, 'name': CloneVM_Task, 'duration_secs': 1.396524} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.325902] env[69992]: INFO nova.virt.vmwareapi.vmops [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Created linked-clone VM from snapshot [ 1059.327028] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791bb758-9c00-47bd-b476-5e6e0f48f728 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.337622] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Uploading image 3e12f89e-654b-4bb0-ad8e-71e43e4eaf9a {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1059.363122] env[69992]: DEBUG oslo_vmware.rw_handles [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1059.363122] env[69992]: value = "vm-581980" [ 1059.363122] env[69992]: _type = "VirtualMachine" [ 1059.363122] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1059.363761] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ceed7c5a-aa77-4192-a5f2-66af8009b1c7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.372430] env[69992]: DEBUG oslo_vmware.rw_handles [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lease: (returnval){ [ 1059.372430] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52f1b151-090a-7524-9f3c-f07850bb7aef" [ 1059.372430] env[69992]: _type = "HttpNfcLease" [ 1059.372430] env[69992]: } obtained for exporting VM: (result){ [ 1059.372430] env[69992]: value = "vm-581980" [ 1059.372430] env[69992]: _type = "VirtualMachine" [ 1059.372430] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1059.372741] env[69992]: DEBUG oslo_vmware.api [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the lease: (returnval){ [ 1059.372741] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52f1b151-090a-7524-9f3c-f07850bb7aef" [ 1059.372741] env[69992]: _type = "HttpNfcLease" [ 1059.372741] env[69992]: } to be ready. {{(pid=69992) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1059.380661] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1059.380661] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52f1b151-090a-7524-9f3c-f07850bb7aef" [ 1059.380661] env[69992]: _type = "HttpNfcLease" [ 1059.380661] env[69992]: } is initializing. 
{{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1059.408181] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquiring lock "f64108ec-c3b2-4b11-9085-2c56b0de93f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.408370] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lock "f64108ec-c3b2-4b11-9085-2c56b0de93f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.470972] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.670s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1059.471589] env[69992]: DEBUG nova.compute.manager [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1059.474163] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.560s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.476543] env[69992]: INFO nova.compute.claims [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1059.485894] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "f2ac32d7-d32b-497a-a262-ab1cd95f87d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.488450] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "f2ac32d7-d32b-497a-a262-ab1cd95f87d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
1059.687512] env[69992]: DEBUG oslo_vmware.api [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897164, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.710413] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897177, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.729312] env[69992]: INFO nova.compute.manager [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Took 44.99 seconds to build instance. [ 1059.754747] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Releasing lock "refresh_cache-efa06ccc-be20-4d0e-938f-01c91ef4de8e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1059.755076] env[69992]: DEBUG nova.compute.manager [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Instance network_info: |[{"id": "66d406e0-6f68-43e7-ab80-d030bf95c7bb", "address": "fa:16:3e:13:fa:bc", "network": {"id": "e41a3bc7-387a-4d51-b682-2bae9aa6bab0", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-579698461-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0183165a87814cc28924a3bf6c07f171", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a58387dd-f438-4913-af6a-fafb734cd881", "external-id": "nsx-vlan-transportzone-169", "segmentation_id": 169, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66d406e0-6f", "ovs_interfaceid": "66d406e0-6f68-43e7-ab80-d030bf95c7bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1059.755485] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:fa:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a58387dd-f438-4913-af6a-fafb734cd881', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '66d406e0-6f68-43e7-ab80-d030bf95c7bb', 'vif_model': 'vmxnet3'}] {{(pid=69992) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1059.763577] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Creating folder: Project (0183165a87814cc28924a3bf6c07f171). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1059.764187] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2ae327dd-f364-4b2e-a977-1b84cac56eec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.777312] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Created folder: Project (0183165a87814cc28924a3bf6c07f171) in parent group-v581821. [ 1059.777518] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Creating folder: Instances. Parent ref: group-v581981. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1059.777763] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c96d248a-1bf9-41a2-873c-0c76cfdbf096 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.787342] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Created folder: Instances in parent group-v581981. [ 1059.787583] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1059.787775] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1059.787974] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-963a2769-f506-4e43-93ba-08d0a50d1392 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.807159] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1059.807159] env[69992]: value = "task-2897181" [ 1059.807159] env[69992]: _type = "Task" [ 1059.807159] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.816980] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897181, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.883923] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1059.883923] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52f1b151-090a-7524-9f3c-f07850bb7aef" [ 1059.883923] env[69992]: _type = "HttpNfcLease" [ 1059.883923] env[69992]: } is ready. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1059.883923] env[69992]: DEBUG oslo_vmware.rw_handles [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1059.883923] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52f1b151-090a-7524-9f3c-f07850bb7aef" [ 1059.883923] env[69992]: _type = "HttpNfcLease" [ 1059.883923] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1059.883923] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d6de7d-d6a3-499b-b2ff-7a766abce9ee {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.892360] env[69992]: DEBUG oslo_vmware.rw_handles [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522bd276-456b-e9d7-88ed-02fb4a5c589e/disk-0.vmdk from lease info. {{(pid=69992) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1059.892656] env[69992]: DEBUG oslo_vmware.rw_handles [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522bd276-456b-e9d7-88ed-02fb4a5c589e/disk-0.vmdk for reading. {{(pid=69992) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1059.985201] env[69992]: DEBUG nova.compute.utils [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1059.987310] env[69992]: DEBUG nova.compute.manager [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1059.987499] env[69992]: DEBUG nova.network.neutron [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1059.990735] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4851d0de-b38f-4814-a444-ebad8fe5c645 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.026683] env[69992]: DEBUG nova.policy [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8589a47b616643f5a513f62354529eda', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57eaf44c4ac5491380b329e1e86e9454', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1060.109524] env[69992]: DEBUG nova.compute.manager [req-e3bc1c6e-d05d-4399-be79-63e882140e15 req-9335cb55-aa21-42ca-854f-b4f488e132ea service nova] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Received event network-changed-66d406e0-6f68-43e7-ab80-d030bf95c7bb {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1060.110499] env[69992]: DEBUG nova.compute.manager [req-e3bc1c6e-d05d-4399-be79-63e882140e15 req-9335cb55-aa21-42ca-854f-b4f488e132ea service nova] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Refreshing instance network info cache due to event network-changed-66d406e0-6f68-43e7-ab80-d030bf95c7bb. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1060.110842] env[69992]: DEBUG oslo_concurrency.lockutils [req-e3bc1c6e-d05d-4399-be79-63e882140e15 req-9335cb55-aa21-42ca-854f-b4f488e132ea service nova] Acquiring lock "refresh_cache-efa06ccc-be20-4d0e-938f-01c91ef4de8e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.111085] env[69992]: DEBUG oslo_concurrency.lockutils [req-e3bc1c6e-d05d-4399-be79-63e882140e15 req-9335cb55-aa21-42ca-854f-b4f488e132ea service nova] Acquired lock "refresh_cache-efa06ccc-be20-4d0e-938f-01c91ef4de8e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1060.111372] env[69992]: DEBUG nova.network.neutron [req-e3bc1c6e-d05d-4399-be79-63e882140e15 req-9335cb55-aa21-42ca-854f-b4f488e132ea service nova] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Refreshing network info cache for port 66d406e0-6f68-43e7-ab80-d030bf95c7bb {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1060.189044] env[69992]: DEBUG oslo_vmware.api [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897164, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.213846] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897177, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.231623] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b88a0754-785b-46a9-b1f3-dc07536f6f01 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Lock "06442c68-7dc6-46a1-9e35-34a62730a555" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 99.131s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.319324] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897181, 'name': CreateVM_Task, 'duration_secs': 0.389934} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.319724] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1060.320588] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.322333] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1060.322818] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1060.323159] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a898723-e61d-404b-b813-efc793c42ce9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.329396] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Waiting for the task: (returnval){ [ 1060.329396] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52caf251-a911-7ba6-c0dd-51cd1a91c1ad" [ 1060.329396] env[69992]: _type = "Task" [ 1060.329396] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.340484] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52caf251-a911-7ba6-c0dd-51cd1a91c1ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.494659] env[69992]: DEBUG nova.compute.manager [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1060.552063] env[69992]: DEBUG nova.network.neutron [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Successfully created port: 80bd277f-8072-43a4-a5a0-6c9f7e01f1a8 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1060.690849] env[69992]: DEBUG oslo_vmware.api [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897164, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.711801] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897177, 'name': ReconfigVM_Task, 'duration_secs': 1.041354} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.716250] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Reconfigured VM instance instance-00000034 to attach disk [datastore2] 673be00f-e3c5-4a54-beeb-cf89828e9e32/673be00f-e3c5-4a54-beeb-cf89828e9e32.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1060.716390] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a86e7357-4802-429e-ab0f-26f23890546e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.725693] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1060.725693] env[69992]: value = "task-2897182" [ 1060.725693] env[69992]: _type = "Task" [ 1060.725693] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.734702] env[69992]: DEBUG nova.compute.manager [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1060.752385] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897182, 'name': Rename_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.845063] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52caf251-a911-7ba6-c0dd-51cd1a91c1ad, 'name': SearchDatastore_Task, 'duration_secs': 0.01397} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.845063] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.845063] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1060.845063] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.845063] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1060.845063] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1060.845063] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc87ed39-ee32-49f5-b9a4-bdcc6238700e {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.855692] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1060.856067] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1060.859853] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87d4ee09-5ac3-45fb-97e4-8ae163bac97d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.867914] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Waiting for the task: (returnval){ [ 1060.867914] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]527a501b-5f67-88ff-4379-bc68d4ba9503" [ 1060.867914] env[69992]: _type = "Task" [ 1060.867914] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.876028] env[69992]: DEBUG nova.network.neutron [req-e3bc1c6e-d05d-4399-be79-63e882140e15 req-9335cb55-aa21-42ca-854f-b4f488e132ea service nova] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Updated VIF entry in instance network info cache for port 66d406e0-6f68-43e7-ab80-d030bf95c7bb. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1060.876028] env[69992]: DEBUG nova.network.neutron [req-e3bc1c6e-d05d-4399-be79-63e882140e15 req-9335cb55-aa21-42ca-854f-b4f488e132ea service nova] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Updating instance_info_cache with network_info: [{"id": "66d406e0-6f68-43e7-ab80-d030bf95c7bb", "address": "fa:16:3e:13:fa:bc", "network": {"id": "e41a3bc7-387a-4d51-b682-2bae9aa6bab0", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-579698461-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0183165a87814cc28924a3bf6c07f171", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a58387dd-f438-4913-af6a-fafb734cd881", "external-id": "nsx-vlan-transportzone-169", "segmentation_id": 169, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66d406e0-6f", "ovs_interfaceid": "66d406e0-6f68-43e7-ab80-d030bf95c7bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.884392] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]527a501b-5f67-88ff-4379-bc68d4ba9503, 'name': SearchDatastore_Task, 'duration_secs': 0.013287} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.885347] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b49d8399-8620-4626-88f5-fcc2135fe179 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.892967] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Waiting for the task: (returnval){ [ 1060.892967] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5227b1b3-5e46-f31f-5d7d-0b457821cdfb" [ 1060.892967] env[69992]: _type = "Task" [ 1060.892967] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.910256] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5227b1b3-5e46-f31f-5d7d-0b457821cdfb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.188700] env[69992]: DEBUG oslo_vmware.api [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897164, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.202647] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea623175-5f2f-45eb-8e63-f3882a8d8622 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.211101] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05af0115-775e-429b-9bc6-4d88faf8f0a2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.249835] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d20188-ed19-4807-bfc5-89dd7fc3be55 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.264325] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897182, 'name': Rename_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.268311] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.269664] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc0b647-6bcd-4f97-8cf9-019fa7519fb6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.284870] env[69992]: DEBUG nova.compute.provider_tree [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1061.380035] env[69992]: DEBUG oslo_concurrency.lockutils [req-e3bc1c6e-d05d-4399-be79-63e882140e15 req-9335cb55-aa21-42ca-854f-b4f488e132ea service nova] Releasing lock "refresh_cache-efa06ccc-be20-4d0e-938f-01c91ef4de8e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.405509] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 
tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5227b1b3-5e46-f31f-5d7d-0b457821cdfb, 'name': SearchDatastore_Task, 'duration_secs': 0.016391} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.406226] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.406846] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] efa06ccc-be20-4d0e-938f-01c91ef4de8e/efa06ccc-be20-4d0e-938f-01c91ef4de8e.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1061.407758] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c4774e28-afd4-4ea2-8349-a3c53fd84a3d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.418023] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Waiting for the task: (returnval){ [ 1061.418023] env[69992]: value = "task-2897183" [ 1061.418023] env[69992]: _type = "Task" [ 1061.418023] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.428603] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': task-2897183, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.504073] env[69992]: DEBUG nova.compute.manager [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1061.530686] env[69992]: DEBUG nova.virt.hardware [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1061.531107] env[69992]: DEBUG nova.virt.hardware [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1061.531311] env[69992]: DEBUG nova.virt.hardware [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1061.531609] env[69992]: DEBUG nova.virt.hardware [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1061.531850] env[69992]: DEBUG nova.virt.hardware [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1061.532110] env[69992]: DEBUG nova.virt.hardware [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1061.532476] env[69992]: DEBUG nova.virt.hardware [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1061.532667] env[69992]: DEBUG nova.virt.hardware [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1061.532903] env[69992]: DEBUG nova.virt.hardware [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 
tempest-ServersTestJSON-1985445483-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1061.533168] env[69992]: DEBUG nova.virt.hardware [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1061.533468] env[69992]: DEBUG nova.virt.hardware [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1061.535026] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d8dbcd-564c-4025-865c-902dea1905ce {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.544212] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257cca46-9541-4157-a15e-12c59fdd2050 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.687465] env[69992]: DEBUG oslo_vmware.api [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897164, 'name': ReconfigVM_Task, 'duration_secs': 5.821781} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.687775] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.688023] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Reconfigured VM to detach interface {{(pid=69992) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1061.754925] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897182, 'name': Rename_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.809994] env[69992]: ERROR nova.scheduler.client.report [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [req-1bc9338e-0f64-4c3d-87e7-ae91caef8af2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1bc9338e-0f64-4c3d-87e7-ae91caef8af2"}]} [ 1061.829316] env[69992]: DEBUG nova.scheduler.client.report [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Refreshing inventories for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1061.848799] env[69992]: DEBUG nova.scheduler.client.report [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Updating ProviderTree inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1061.849040] env[69992]: DEBUG nova.compute.provider_tree [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1061.861529] env[69992]: DEBUG nova.scheduler.client.report [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Refreshing aggregate associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, aggregates: None {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1061.881643] env[69992]: DEBUG nova.scheduler.client.report [None 
req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Refreshing trait associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1061.938531] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': task-2897183, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.087232] env[69992]: DEBUG nova.compute.manager [req-2bc83b68-f171-4f55-b69d-e16b2c65b4ce req-31c8d93b-6a30-48a1-80ac-2a1dde8be4b7 service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Received event network-vif-deleted-0229965f-d491-4e94-9f75-201dda751cd0 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1062.088029] env[69992]: INFO nova.compute.manager [req-2bc83b68-f171-4f55-b69d-e16b2c65b4ce req-31c8d93b-6a30-48a1-80ac-2a1dde8be4b7 service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Neutron deleted interface 0229965f-d491-4e94-9f75-201dda751cd0; detaching it from the instance and deleting it from the info cache [ 1062.088029] env[69992]: DEBUG nova.network.neutron [req-2bc83b68-f171-4f55-b69d-e16b2c65b4ce req-31c8d93b-6a30-48a1-80ac-2a1dde8be4b7 service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Updating instance_info_cache with network_info: [{"id": "e64de32e-0e37-4777-91e7-8be0da0fa147", "address": "fa:16:3e:c2:9f:3c", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape64de32e-0e", "ovs_interfaceid": "e64de32e-0e37-4777-91e7-8be0da0fa147", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.263251] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897182, 'name': Rename_Task, 'duration_secs': 1.196329} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.267467] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1062.268656] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc9fb521-bb43-4d72-94dc-43c51616d1b5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.276940] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1062.276940] env[69992]: value = "task-2897184" [ 1062.276940] env[69992]: _type = "Task" [ 1062.276940] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.291925] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897184, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.432103] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': task-2897183, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.488036] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Acquiring lock "06442c68-7dc6-46a1-9e35-34a62730a555" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1062.488863] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Lock "06442c68-7dc6-46a1-9e35-34a62730a555" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1062.488863] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Acquiring lock "06442c68-7dc6-46a1-9e35-34a62730a555-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1062.488863] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Lock "06442c68-7dc6-46a1-9e35-34a62730a555-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1062.489090] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Lock "06442c68-7dc6-46a1-9e35-34a62730a555-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.491590] env[69992]: INFO nova.compute.manager [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Terminating instance [ 1062.546270] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cebdbc2-4a32-4824-869e-8a262cc44010 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.554684] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75ec2f1-ae15-44fa-ba17-6dee945a330e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.586164] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f3fb9e-9318-4434-92fb-9ddc5409d8e9 {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.593269] env[69992]: DEBUG nova.compute.manager [req-d4b12e90-af91-45e8-aa52-a444afa6af49 req-1c995277-a163-46eb-a1bc-aaafe5745290 service nova] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Received event network-vif-plugged-80bd277f-8072-43a4-a5a0-6c9f7e01f1a8 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1062.593507] env[69992]: DEBUG oslo_concurrency.lockutils [req-d4b12e90-af91-45e8-aa52-a444afa6af49 req-1c995277-a163-46eb-a1bc-aaafe5745290 service nova] Acquiring lock "fcbe1142-72dc-4a02-af9b-e03a2031a247-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1062.593731] env[69992]: DEBUG oslo_concurrency.lockutils [req-d4b12e90-af91-45e8-aa52-a444afa6af49 req-1c995277-a163-46eb-a1bc-aaafe5745290 service nova] Lock "fcbe1142-72dc-4a02-af9b-e03a2031a247-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1062.593899] env[69992]: DEBUG oslo_concurrency.lockutils [req-d4b12e90-af91-45e8-aa52-a444afa6af49 req-1c995277-a163-46eb-a1bc-aaafe5745290 service nova] Lock "fcbe1142-72dc-4a02-af9b-e03a2031a247-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.597024] env[69992]: DEBUG nova.compute.manager [req-d4b12e90-af91-45e8-aa52-a444afa6af49 req-1c995277-a163-46eb-a1bc-aaafe5745290 service nova] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] No waiting events found dispatching network-vif-plugged-80bd277f-8072-43a4-a5a0-6c9f7e01f1a8 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1062.597024] env[69992]: WARNING nova.compute.manager [req-d4b12e90-af91-45e8-aa52-a444afa6af49 req-1c995277-a163-46eb-a1bc-aaafe5745290 service nova] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Received unexpected event network-vif-plugged-80bd277f-8072-43a4-a5a0-6c9f7e01f1a8 for instance with vm_state building and task_state spawning. 
[ 1062.597024] env[69992]: DEBUG oslo_concurrency.lockutils [req-2bc83b68-f171-4f55-b69d-e16b2c65b4ce req-31c8d93b-6a30-48a1-80ac-2a1dde8be4b7 service nova] Acquiring lock "e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.597024] env[69992]: DEBUG oslo_concurrency.lockutils [req-2bc83b68-f171-4f55-b69d-e16b2c65b4ce req-31c8d93b-6a30-48a1-80ac-2a1dde8be4b7 service nova] Acquired lock "e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.598184] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ae0357-cd08-4683-9f3c-22a7077961a7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.602335] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18af0ce-55ab-4e1e-8819-dde10d2ddf85 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.623119] env[69992]: DEBUG oslo_concurrency.lockutils [req-2bc83b68-f171-4f55-b69d-e16b2c65b4ce req-31c8d93b-6a30-48a1-80ac-2a1dde8be4b7 service nova] Releasing lock "e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.623785] env[69992]: WARNING nova.compute.manager [req-2bc83b68-f171-4f55-b69d-e16b2c65b4ce req-31c8d93b-6a30-48a1-80ac-2a1dde8be4b7 service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Detach interface failed, port_id=0229965f-d491-4e94-9f75-201dda751cd0, reason: No device with interface-id 0229965f-d491-4e94-9f75-201dda751cd0 exists on VM: nova.exception.NotFound: No device with interface-id 0229965f-d491-4e94-9f75-201dda751cd0 exists on VM [ 1062.626961] env[69992]: DEBUG nova.network.neutron [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Successfully updated port: 80bd277f-8072-43a4-a5a0-6c9f7e01f1a8 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1062.635670] env[69992]: DEBUG nova.compute.provider_tree [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1062.788217] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897184, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.930494] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': task-2897183, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.132172} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.930870] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] efa06ccc-be20-4d0e-938f-01c91ef4de8e/efa06ccc-be20-4d0e-938f-01c91ef4de8e.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1062.931096] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1062.931275] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9ef6b429-2717-4ca5-ad12-fa493991dd51 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.940612] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Waiting for the task: (returnval){ [ 1062.940612] env[69992]: value = "task-2897185" [ 1062.940612] env[69992]: _type = "Task" [ 1062.940612] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.951430] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': task-2897185, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.971678] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "refresh_cache-e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.971824] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "refresh_cache-e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.972025] env[69992]: DEBUG nova.network.neutron [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1062.996015] env[69992]: DEBUG nova.compute.manager [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1062.996293] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1062.997439] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55776d5a-75b3-4750-a251-988678a84a5d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.007637] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1063.007943] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b2ebf204-0521-4a9e-ab61-a8142377cf28 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.020100] env[69992]: DEBUG oslo_vmware.api [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Waiting for the task: (returnval){ [ 1063.020100] env[69992]: value = "task-2897186" [ 1063.020100] env[69992]: _type = "Task" [ 1063.020100] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.030883] env[69992]: DEBUG oslo_vmware.api [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Task: {'id': task-2897186, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.141022] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "refresh_cache-fcbe1142-72dc-4a02-af9b-e03a2031a247" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.141022] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired lock "refresh_cache-fcbe1142-72dc-4a02-af9b-e03a2031a247" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1063.141022] env[69992]: DEBUG nova.network.neutron [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1063.174828] env[69992]: DEBUG nova.scheduler.client.report [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 85 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1063.175122] env[69992]: DEBUG nova.compute.provider_tree [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 85 to 86 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1063.175307] env[69992]: DEBUG nova.compute.provider_tree [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1063.288415] env[69992]: DEBUG oslo_vmware.api [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897184, 'name': PowerOnVM_Task, 'duration_secs': 0.743553} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.288689] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1063.288924] env[69992]: INFO nova.compute.manager [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Took 9.39 seconds to spawn the instance on the hypervisor. [ 1063.289127] env[69992]: DEBUG nova.compute.manager [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1063.289917] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0296244e-7d25-43aa-a8bf-3e88dc249b72 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.453267] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': task-2897185, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086081} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.453475] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1063.454266] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0b3870-2926-4a2a-84e8-43441f9cf2fd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.469220] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "e5d9de80-1ee5-462a-8459-168fd60e1972" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.469508] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "e5d9de80-1ee5-462a-8459-168fd60e1972" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.469758] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "e5d9de80-1ee5-462a-8459-168fd60e1972-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.469908] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "e5d9de80-1ee5-462a-8459-168fd60e1972-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.470090] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "e5d9de80-1ee5-462a-8459-168fd60e1972-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.480809] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] efa06ccc-be20-4d0e-938f-01c91ef4de8e/efa06ccc-be20-4d0e-938f-01c91ef4de8e.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1063.481441] env[69992]: INFO nova.compute.manager [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Terminating instance [ 1063.485030] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-009409e3-0dd5-4a48-a56a-19db64f4485d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.508477] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Waiting for the task: (returnval){ [ 1063.508477] env[69992]: value = "task-2897187" [ 1063.508477] env[69992]: _type = "Task" [ 1063.508477] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.518949] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': task-2897187, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.530349] env[69992]: DEBUG oslo_vmware.api [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Task: {'id': task-2897186, 'name': PowerOffVM_Task, 'duration_secs': 0.314194} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.532979] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1063.533212] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1063.533770] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f72c054c-31d0-4268-85af-af113a75bf3f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.610683] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1063.610886] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Deleting contents of the VM from datastore 
datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1063.611083] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Deleting the datastore file [datastore2] 06442c68-7dc6-46a1-9e35-34a62730a555 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1063.611389] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5603b20c-a640-464b-9117-928f861713e7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.623428] env[69992]: DEBUG oslo_vmware.api [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Waiting for the task: (returnval){ [ 1063.623428] env[69992]: value = "task-2897189" [ 1063.623428] env[69992]: _type = "Task" [ 1063.623428] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.634549] env[69992]: DEBUG oslo_vmware.api [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Task: {'id': task-2897189, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.680804] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.206s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.681622] env[69992]: DEBUG nova.compute.manager [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1063.690984] env[69992]: DEBUG oslo_concurrency.lockutils [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.812s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.691162] env[69992]: DEBUG nova.objects.instance [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lazy-loading 'resources' on Instance uuid 27580836-7ab5-4e64-a985-3e6fc22a8b77 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1063.693266] env[69992]: DEBUG nova.network.neutron [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1063.812393] env[69992]: INFO nova.compute.manager [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Took 47.61 seconds to build instance. [ 1063.866224] env[69992]: DEBUG nova.network.neutron [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Updating instance_info_cache with network_info: [{"id": "80bd277f-8072-43a4-a5a0-6c9f7e01f1a8", "address": "fa:16:3e:a2:4a:f4", "network": {"id": "8276eb4a-aa4d-463a-beae-6aab3fe1a5ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-494735806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57eaf44c4ac5491380b329e1e86e9454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80bd277f-80", "ovs_interfaceid": "80bd277f-8072-43a4-a5a0-6c9f7e01f1a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.884662] env[69992]: DEBUG nova.network.neutron [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Updating instance_info_cache with network_info: [{"id": "e64de32e-0e37-4777-91e7-8be0da0fa147", "address": "fa:16:3e:c2:9f:3c", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape64de32e-0e", "ovs_interfaceid": "e64de32e-0e37-4777-91e7-8be0da0fa147", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1064.003033] env[69992]: DEBUG nova.compute.manager [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1064.003033] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1064.003888] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ede4742-9a51-409f-a81f-afaf328e5a21 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.015339] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1064.015769] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c37329a1-62e5-4c72-8453-8a98a826cffa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.021679] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': task-2897187, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.025157] env[69992]: DEBUG oslo_vmware.api [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1064.025157] env[69992]: value = "task-2897190" [ 1064.025157] env[69992]: _type = "Task" [ 1064.025157] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.033345] env[69992]: DEBUG oslo_vmware.api [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897190, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.092458] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b396db88-91a7-4557-9c88-83901913a8a0 tempest-ServersAdminTestJSON-2140674545 tempest-ServersAdminTestJSON-2140674545-project-admin] Acquiring lock "refresh_cache-673be00f-e3c5-4a54-beeb-cf89828e9e32" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.093039] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b396db88-91a7-4557-9c88-83901913a8a0 tempest-ServersAdminTestJSON-2140674545 tempest-ServersAdminTestJSON-2140674545-project-admin] Acquired lock "refresh_cache-673be00f-e3c5-4a54-beeb-cf89828e9e32" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1064.093039] env[69992]: DEBUG nova.network.neutron [None req-b396db88-91a7-4557-9c88-83901913a8a0 tempest-ServersAdminTestJSON-2140674545 tempest-ServersAdminTestJSON-2140674545-project-admin] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1064.135583] env[69992]: DEBUG oslo_vmware.api [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Task: {'id': task-2897189, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.402244} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.135897] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1064.136173] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1064.136413] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1064.136666] env[69992]: INFO nova.compute.manager [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1064.136955] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1064.137212] env[69992]: DEBUG nova.compute.manager [-] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1064.137349] env[69992]: DEBUG nova.network.neutron [-] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1064.199506] env[69992]: DEBUG nova.compute.utils [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1064.201087] env[69992]: DEBUG nova.compute.manager [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1064.201260] env[69992]: DEBUG nova.network.neutron [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1064.252786] env[69992]: DEBUG nova.policy [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd2db25ef40744d5908197233a0c0f1c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8217315011854468b0cc17c4dfe342f9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1064.315061] env[69992]: DEBUG oslo_concurrency.lockutils [None req-220ce353-a9f3-455d-a30f-c89a8902c63e tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "673be00f-e3c5-4a54-beeb-cf89828e9e32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.768s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.368582] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Releasing lock "refresh_cache-fcbe1142-72dc-4a02-af9b-e03a2031a247" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1064.368826] env[69992]: DEBUG nova.compute.manager [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Instance network_info: |[{"id": "80bd277f-8072-43a4-a5a0-6c9f7e01f1a8", 
"address": "fa:16:3e:a2:4a:f4", "network": {"id": "8276eb4a-aa4d-463a-beae-6aab3fe1a5ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-494735806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57eaf44c4ac5491380b329e1e86e9454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80bd277f-80", "ovs_interfaceid": "80bd277f-8072-43a4-a5a0-6c9f7e01f1a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1064.369593] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:4a:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73b1ea51-8078-4169-921e-d5a224120ab4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '80bd277f-8072-43a4-a5a0-6c9f7e01f1a8', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1064.377699] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Creating folder: Project (57eaf44c4ac5491380b329e1e86e9454). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1064.380663] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9d4c4d1a-ec7f-4e88-a330-48825da44100 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.387737] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "refresh_cache-e5d9de80-1ee5-462a-8459-168fd60e1972" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1064.398461] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Created folder: Project (57eaf44c4ac5491380b329e1e86e9454) in parent group-v581821. [ 1064.398690] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Creating folder: Instances. Parent ref: group-v581984. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1064.399607] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c44fa887-28d7-4ba8-b79e-0c8ff5121d57 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.414667] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Created folder: Instances in parent group-v581984. [ 1064.414828] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1064.414963] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1064.415208] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-48b37dea-678b-4101-b99b-bc279787e87e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.445831] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1064.445831] env[69992]: value = "task-2897193" [ 1064.445831] env[69992]: _type = "Task" [ 1064.445831] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.458421] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897193, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.522290] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': task-2897187, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.539022] env[69992]: DEBUG oslo_vmware.api [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897190, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.592912] env[69992]: DEBUG nova.network.neutron [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Successfully created port: 789f6123-167b-48dd-ae68-cfdbc1d5c78a {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1064.707237] env[69992]: DEBUG nova.compute.manager [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1064.819711] env[69992]: DEBUG nova.compute.manager [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1064.892588] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7483952f-4492-4223-82a6-3d28feb8cafa tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "interface-e5d9de80-1ee5-462a-8459-168fd60e1972-0229965f-d491-4e94-9f75-201dda751cd0" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.853s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.931307] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d163c1-e141-4db1-99f1-f834f92ae618 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.940518] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667f4744-b343-459c-bc58-ffbea3aad80a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.957020] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897193, 'name': CreateVM_Task, 'duration_secs': 0.468307} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.986498] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1064.990131] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.990887] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1064.990887] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1064.991577] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54cd200e-7476-4931-96d3-af608d928799 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.995148] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-5495e10e-0ed0-4d37-87a9-bf6b5b6a94f1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.000204] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1065.000204] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5236a5cb-6c96-6050-8932-461ff78befce" [ 1065.000204] env[69992]: _type = "Task" [ 1065.000204] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.006246] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c3be80-17eb-4aa5-b871-32df0c711bb7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.017083] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5236a5cb-6c96-6050-8932-461ff78befce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.025189] env[69992]: DEBUG nova.compute.provider_tree [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1065.032731] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': task-2897187, 'name': ReconfigVM_Task, 'duration_secs': 1.435898} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.033527] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Reconfigured VM instance instance-00000035 to attach disk [datastore1] efa06ccc-be20-4d0e-938f-01c91ef4de8e/efa06ccc-be20-4d0e-938f-01c91ef4de8e.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1065.034181] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b558e7e1-1d30-4331-ae1a-6422ac98d952 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.038783] env[69992]: DEBUG oslo_vmware.api [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897190, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.045678] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Waiting for the task: (returnval){ [ 1065.045678] env[69992]: value = "task-2897194" [ 1065.045678] env[69992]: _type = "Task" [ 1065.045678] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.055852] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': task-2897194, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.058251] env[69992]: DEBUG nova.network.neutron [-] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.095722] env[69992]: DEBUG nova.compute.manager [req-43f50392-22cc-48a4-bf2d-371db6d6f298 req-6fa93ad8-02c1-4a04-806a-bf257d4c1e4f service nova] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Received event network-changed-80bd277f-8072-43a4-a5a0-6c9f7e01f1a8 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1065.095934] env[69992]: DEBUG nova.compute.manager [req-43f50392-22cc-48a4-bf2d-371db6d6f298 req-6fa93ad8-02c1-4a04-806a-bf257d4c1e4f service nova] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Refreshing instance network info cache due to event network-changed-80bd277f-8072-43a4-a5a0-6c9f7e01f1a8. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1065.096392] env[69992]: DEBUG oslo_concurrency.lockutils [req-43f50392-22cc-48a4-bf2d-371db6d6f298 req-6fa93ad8-02c1-4a04-806a-bf257d4c1e4f service nova] Acquiring lock "refresh_cache-fcbe1142-72dc-4a02-af9b-e03a2031a247" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.096392] env[69992]: DEBUG oslo_concurrency.lockutils [req-43f50392-22cc-48a4-bf2d-371db6d6f298 req-6fa93ad8-02c1-4a04-806a-bf257d4c1e4f service nova] Acquired lock "refresh_cache-fcbe1142-72dc-4a02-af9b-e03a2031a247" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1065.096609] env[69992]: DEBUG nova.network.neutron [req-43f50392-22cc-48a4-bf2d-371db6d6f298 req-6fa93ad8-02c1-4a04-806a-bf257d4c1e4f service nova] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Refreshing network info cache for port 80bd277f-8072-43a4-a5a0-6c9f7e01f1a8 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1065.310183] env[69992]: DEBUG nova.network.neutron [None req-b396db88-91a7-4557-9c88-83901913a8a0 tempest-ServersAdminTestJSON-2140674545 tempest-ServersAdminTestJSON-2140674545-project-admin] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Updating instance_info_cache with network_info: [{"id": "3479d475-b805-49db-a031-c31a6724c10d", "address": "fa:16:3e:db:b1:08", "network": {"id": "918ab136-b380-4ccd-b218-738aac4652fa", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1944921913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02824f4021a5400583cf13cd553207fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac2c9d07-ed01-47a9-88f1-562992bc1076", "external-id": "nsx-vlan-transportzone-968", "segmentation_id": 968, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3479d475-b8", "ovs_interfaceid": "3479d475-b805-49db-a031-c31a6724c10d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.343135] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.515060] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5236a5cb-6c96-6050-8932-461ff78befce, 'name': SearchDatastore_Task, 'duration_secs': 0.015935} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.515253] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1065.515419] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1065.515753] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.516040] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1065.516379] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1065.516810] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cc7a94c6-9664-413e-b4ca-26b174710d42 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.528182] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1065.528384] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1065.532639] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aaa5c87a-5a24-4744-a857-04e7a279c44f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.535397] env[69992]: DEBUG nova.scheduler.client.report [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1065.543301] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1065.543301] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]522fb8e3-2659-bf9b-9986-4914ba9afffc" [ 1065.543301] env[69992]: _type = "Task" [ 1065.543301] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.546604] env[69992]: DEBUG oslo_vmware.api [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897190, 'name': PowerOffVM_Task, 'duration_secs': 1.050584} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.553400] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1065.553649] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1065.553968] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51b5a4a9-979d-4882-8f4d-c56c1ef6f38f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.562270] env[69992]: INFO nova.compute.manager [-] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Took 1.42 seconds to deallocate network for instance. [ 1065.562591] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]522fb8e3-2659-bf9b-9986-4914ba9afffc, 'name': SearchDatastore_Task, 'duration_secs': 0.015405} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.568833] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': task-2897194, 'name': Rename_Task, 'duration_secs': 0.199538} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.572147] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-759a3da3-95bb-484e-81c2-880e629a8677 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.575741] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1065.575870] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a493a16c-abf7-4b5d-a6c9-c879724d3434 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.582273] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1065.582273] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5257b392-8e5f-0f15-3dd9-b7910b2db220" [ 1065.582273] env[69992]: _type = "Task" [ 1065.582273] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.589027] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Waiting for the task: (returnval){ [ 1065.589027] env[69992]: value = "task-2897196" [ 1065.589027] env[69992]: _type = "Task" [ 1065.589027] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.595827] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5257b392-8e5f-0f15-3dd9-b7910b2db220, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.603799] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': task-2897196, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.636747] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1065.637260] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1065.637667] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Deleting the datastore file [datastore1] e5d9de80-1ee5-462a-8459-168fd60e1972 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1065.638188] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-940a8221-479c-476f-9367-f81eb7e9f9a5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.652934] env[69992]: DEBUG oslo_vmware.api [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1065.652934] env[69992]: value = "task-2897197" [ 1065.652934] env[69992]: _type = "Task" [ 1065.652934] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.662474] env[69992]: DEBUG oslo_vmware.api [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897197, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.723674] env[69992]: DEBUG nova.compute.manager [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1065.753039] env[69992]: DEBUG nova.virt.hardware [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1065.753324] env[69992]: DEBUG nova.virt.hardware [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1065.753491] env[69992]: DEBUG nova.virt.hardware [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1065.753704] env[69992]: DEBUG nova.virt.hardware [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1065.753846] env[69992]: DEBUG nova.virt.hardware [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1065.753994] env[69992]: DEBUG nova.virt.hardware [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1065.754898] env[69992]: DEBUG nova.virt.hardware [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1065.755180] env[69992]: DEBUG nova.virt.hardware [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1065.755431] env[69992]: DEBUG nova.virt.hardware [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1065.755664] env[69992]: DEBUG nova.virt.hardware [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1065.755906] env[69992]: DEBUG nova.virt.hardware [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1065.756902] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72c7637-12e2-4cf8-b332-8870273511f5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.769019] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f797a031-5227-41a8-8bad-12b64d14377e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.813636] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b396db88-91a7-4557-9c88-83901913a8a0 tempest-ServersAdminTestJSON-2140674545 tempest-ServersAdminTestJSON-2140674545-project-admin] Releasing lock "refresh_cache-673be00f-e3c5-4a54-beeb-cf89828e9e32" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1065.813984] env[69992]: DEBUG nova.compute.manager [None req-b396db88-91a7-4557-9c88-83901913a8a0 tempest-ServersAdminTestJSON-2140674545 tempest-ServersAdminTestJSON-2140674545-project-admin] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Inject network info {{(pid=69992) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 1065.814286] env[69992]: DEBUG nova.compute.manager [None req-b396db88-91a7-4557-9c88-83901913a8a0 tempest-ServersAdminTestJSON-2140674545 tempest-ServersAdminTestJSON-2140674545-project-admin] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] network_info to inject: |[{"id": "3479d475-b805-49db-a031-c31a6724c10d", "address": "fa:16:3e:db:b1:08", "network": {"id": "918ab136-b380-4ccd-b218-738aac4652fa", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1944921913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02824f4021a5400583cf13cd553207fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac2c9d07-ed01-47a9-88f1-562992bc1076", "external-id": "nsx-vlan-transportzone-968", 
"segmentation_id": 968, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3479d475-b8", "ovs_interfaceid": "3479d475-b805-49db-a031-c31a6724c10d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 1065.819703] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b396db88-91a7-4557-9c88-83901913a8a0 tempest-ServersAdminTestJSON-2140674545 tempest-ServersAdminTestJSON-2140674545-project-admin] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Reconfiguring VM instance to set the machine id {{(pid=69992) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1065.820621] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d0d33c2-01ea-47a0-8a7e-bc62c2e4a8dd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.840922] env[69992]: DEBUG oslo_vmware.api [None req-b396db88-91a7-4557-9c88-83901913a8a0 tempest-ServersAdminTestJSON-2140674545 tempest-ServersAdminTestJSON-2140674545-project-admin] Waiting for the task: (returnval){ [ 1065.840922] env[69992]: value = "task-2897198" [ 1065.840922] env[69992]: _type = "Task" [ 1065.840922] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.853177] env[69992]: DEBUG oslo_vmware.api [None req-b396db88-91a7-4557-9c88-83901913a8a0 tempest-ServersAdminTestJSON-2140674545 tempest-ServersAdminTestJSON-2140674545-project-admin] Task: {'id': task-2897198, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.043599] env[69992]: DEBUG oslo_concurrency.lockutils [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.353s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.047176] env[69992]: DEBUG nova.network.neutron [req-43f50392-22cc-48a4-bf2d-371db6d6f298 req-6fa93ad8-02c1-4a04-806a-bf257d4c1e4f service nova] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Updated VIF entry in instance network info cache for port 80bd277f-8072-43a4-a5a0-6c9f7e01f1a8. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1066.047284] env[69992]: DEBUG nova.network.neutron [req-43f50392-22cc-48a4-bf2d-371db6d6f298 req-6fa93ad8-02c1-4a04-806a-bf257d4c1e4f service nova] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Updating instance_info_cache with network_info: [{"id": "80bd277f-8072-43a4-a5a0-6c9f7e01f1a8", "address": "fa:16:3e:a2:4a:f4", "network": {"id": "8276eb4a-aa4d-463a-beae-6aab3fe1a5ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-494735806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57eaf44c4ac5491380b329e1e86e9454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80bd277f-80", "ovs_interfaceid": "80bd277f-8072-43a4-a5a0-6c9f7e01f1a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.048568] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.671s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.050178] env[69992]: DEBUG nova.objects.instance [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lazy-loading 'resources' on Instance uuid 73e41918-88b8-4ff7-9fdd-b45ac97c80ec {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.074491] env[69992]: INFO nova.scheduler.client.report [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Deleted allocations for instance 27580836-7ab5-4e64-a985-3e6fc22a8b77 [ 1066.076511] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.108941] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': task-2897196, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.111031] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5257b392-8e5f-0f15-3dd9-b7910b2db220, 'name': SearchDatastore_Task, 'duration_secs': 0.011538} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.111031] env[69992]: DEBUG nova.network.neutron [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Successfully updated port: 789f6123-167b-48dd-ae68-cfdbc1d5c78a {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1066.111031] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1066.114525] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] fcbe1142-72dc-4a02-af9b-e03a2031a247/fcbe1142-72dc-4a02-af9b-e03a2031a247.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1066.114525] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea471f01-7d20-46c7-8f7f-db3878f8a0f0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.122777] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1066.122777] env[69992]: value = "task-2897199" [ 1066.122777] env[69992]: _type = "Task" [ 1066.122777] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.134683] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897199, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.165824] env[69992]: DEBUG oslo_vmware.api [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897197, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193035} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.166194] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1066.166348] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1066.166541] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1066.167196] env[69992]: INFO nova.compute.manager [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Took 2.16 seconds to destroy the instance on the hypervisor. [ 1066.167196] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1066.167631] env[69992]: DEBUG nova.compute.manager [-] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1066.167732] env[69992]: DEBUG nova.network.neutron [-] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1066.288791] env[69992]: DEBUG nova.compute.manager [req-9b5c825e-b52a-405b-a4be-e575d239b8d2 req-a4e6f6d7-42d5-4a78-9bf9-e153c683d5ad service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Received event network-vif-plugged-789f6123-167b-48dd-ae68-cfdbc1d5c78a {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1066.289042] env[69992]: DEBUG oslo_concurrency.lockutils [req-9b5c825e-b52a-405b-a4be-e575d239b8d2 req-a4e6f6d7-42d5-4a78-9bf9-e153c683d5ad service nova] Acquiring lock "1b4da2ab-d026-45d8-8234-79ddd84d5cbb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.289467] env[69992]: DEBUG oslo_concurrency.lockutils [req-9b5c825e-b52a-405b-a4be-e575d239b8d2 req-a4e6f6d7-42d5-4a78-9bf9-e153c683d5ad service nova] Lock "1b4da2ab-d026-45d8-8234-79ddd84d5cbb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.289657] env[69992]: DEBUG oslo_concurrency.lockutils [req-9b5c825e-b52a-405b-a4be-e575d239b8d2 req-a4e6f6d7-42d5-4a78-9bf9-e153c683d5ad service nova] Lock "1b4da2ab-d026-45d8-8234-79ddd84d5cbb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.289904] env[69992]: DEBUG nova.compute.manager [req-9b5c825e-b52a-405b-a4be-e575d239b8d2 req-a4e6f6d7-42d5-4a78-9bf9-e153c683d5ad service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] No waiting events found dispatching network-vif-plugged-789f6123-167b-48dd-ae68-cfdbc1d5c78a {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1066.290154] env[69992]: WARNING nova.compute.manager [req-9b5c825e-b52a-405b-a4be-e575d239b8d2 req-a4e6f6d7-42d5-4a78-9bf9-e153c683d5ad service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Received unexpected event network-vif-plugged-789f6123-167b-48dd-ae68-cfdbc1d5c78a for instance with vm_state building and task_state spawning. [ 1066.353049] env[69992]: DEBUG oslo_vmware.api [None req-b396db88-91a7-4557-9c88-83901913a8a0 tempest-ServersAdminTestJSON-2140674545 tempest-ServersAdminTestJSON-2140674545-project-admin] Task: {'id': task-2897198, 'name': ReconfigVM_Task, 'duration_secs': 0.187758} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.353355] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b396db88-91a7-4557-9c88-83901913a8a0 tempest-ServersAdminTestJSON-2140674545 tempest-ServersAdminTestJSON-2140674545-project-admin] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Reconfigured VM instance to set the machine id {{(pid=69992) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1066.555022] env[69992]: DEBUG oslo_concurrency.lockutils [req-43f50392-22cc-48a4-bf2d-371db6d6f298 req-6fa93ad8-02c1-4a04-806a-bf257d4c1e4f service nova] Releasing lock "refresh_cache-fcbe1142-72dc-4a02-af9b-e03a2031a247" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1066.555022] env[69992]: DEBUG nova.compute.manager [req-43f50392-22cc-48a4-bf2d-371db6d6f298 req-6fa93ad8-02c1-4a04-806a-bf257d4c1e4f service nova] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Received event network-vif-deleted-3b8fa629-0413-47b5-9a6d-7b64d336638d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1066.601222] env[69992]: DEBUG oslo_concurrency.lockutils [None req-467275c3-c353-464a-a968-ce00629a4908 tempest-ServerRescueTestJSON-1973402 tempest-ServerRescueTestJSON-1973402-project-member] Lock "27580836-7ab5-4e64-a985-3e6fc22a8b77" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.732s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.625367] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "refresh_cache-1b4da2ab-d026-45d8-8234-79ddd84d5cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.625571] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquired lock "refresh_cache-1b4da2ab-d026-45d8-8234-79ddd84d5cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1066.625742] env[69992]: DEBUG nova.network.neutron [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1066.627381] env[69992]: DEBUG oslo_vmware.api [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': task-2897196, 'name': PowerOnVM_Task, 'duration_secs': 0.638998} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.629042] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1066.629374] env[69992]: INFO nova.compute.manager [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Took 10.10 seconds to spawn the instance on the hypervisor. [ 1066.629591] env[69992]: DEBUG nova.compute.manager [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1066.634785] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44b50a49-1340-4f2b-9afe-cad4d9563e82 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.650496] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897199, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.144369] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897199, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.783987} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.149021] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] fcbe1142-72dc-4a02-af9b-e03a2031a247/fcbe1142-72dc-4a02-af9b-e03a2031a247.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1067.149021] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1067.149021] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bad2694e-ba50-4c0c-ab3a-84e90bd3e5cf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.156286] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1067.156286] env[69992]: value = "task-2897200" [ 1067.156286] env[69992]: _type = "Task" [ 1067.156286] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.156643] env[69992]: DEBUG nova.network.neutron [-] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.175326] env[69992]: DEBUG nova.network.neutron [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1067.177634] env[69992]: INFO nova.compute.manager [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Took 47.12 seconds to build instance. [ 1067.185951] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897200, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.290772] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d388ba1a-cef5-4e0f-847d-3fa3a15cc6c6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.300131] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee5ba62c-e3d5-48fc-8b8f-95fdf52d81a7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.336415] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d51c9cce-986c-4c2a-97ff-3435e4bea4b8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.346555] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64003b94-8380-41e2-8332-a78e2e29ecd3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.362387] env[69992]: DEBUG nova.compute.provider_tree [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1067.372420] env[69992]: DEBUG nova.network.neutron [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Updating instance_info_cache with network_info: [{"id": "789f6123-167b-48dd-ae68-cfdbc1d5c78a", "address": "fa:16:3e:ed:f2:3c", "network": {"id": "e710c9b1-06e9-45f1-88fe-251001c4f54f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1116812669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8217315011854468b0cc17c4dfe342f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap789f6123-16", "ovs_interfaceid": "789f6123-167b-48dd-ae68-cfdbc1d5c78a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.667854] env[69992]: INFO nova.compute.manager [-] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Took 1.50 seconds to deallocate network for instance. 
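[editor's note] The repeated "Waiting for the task: (returnval){ ... } to complete" blocks and the "_poll_task ... progress is N% / completed successfully" entries above all come from oslo.vmware's session helper, which issues a vSphere SOAP call that returns a Task managed object and then polls it. As a rough, hedged sketch of that call/poll pattern (the vCenter host, credentials, and the managed-object value below are placeholders for illustration, not values taken from this log):

```python
# Hedged sketch of the oslo.vmware invoke/poll pattern seen in these log lines.
# Connection details and the 'vm-123' moref value are placeholders.
from oslo_vmware import api
from oslo_vmware import vim_util

session = api.VMwareAPISession(
    'vcenter.example.test',   # placeholder vCenter host
    'user', 'password',       # placeholder credentials
    10,                       # api_retry_count
    0.5)                      # task_poll_interval, i.e. seconds between _poll_task calls

# invoke_api() issues the SOAP request (the "Invoking <Object>.<Something>_Task" entries),
# returning a Task reference; wait_for_task() polls it until it completes, producing the
# "progress is N%" and "completed successfully" entries above.
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')  # placeholder VM reference
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)
```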
[ 1067.669765] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897200, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.146826} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.671577] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1067.676158] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4248294c-1816-44ab-9b90-95ba68516675 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.702816] env[69992]: DEBUG oslo_concurrency.lockutils [None req-08c726df-70b1-4cc3-86d3-7194e9c4591f tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Lock "efa06ccc-be20-4d0e-938f-01c91ef4de8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.477s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.719147] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] fcbe1142-72dc-4a02-af9b-e03a2031a247/fcbe1142-72dc-4a02-af9b-e03a2031a247.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1067.720619] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4135ba03-35fb-45f9-a65c-40fbb59a0fb3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.743264] env[69992]: DEBUG nova.compute.manager [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1067.753541] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1067.753541] env[69992]: value = "task-2897201" [ 1067.753541] env[69992]: _type = "Task" [ 1067.753541] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.762812] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897201, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.866604] env[69992]: DEBUG nova.scheduler.client.report [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1067.874969] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Releasing lock "refresh_cache-1b4da2ab-d026-45d8-8234-79ddd84d5cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1067.875282] env[69992]: DEBUG nova.compute.manager [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Instance network_info: |[{"id": "789f6123-167b-48dd-ae68-cfdbc1d5c78a", "address": "fa:16:3e:ed:f2:3c", "network": {"id": "e710c9b1-06e9-45f1-88fe-251001c4f54f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1116812669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8217315011854468b0cc17c4dfe342f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap789f6123-16", "ovs_interfaceid": "789f6123-167b-48dd-ae68-cfdbc1d5c78a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1067.875685] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:f2:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e1049e8-c06b-4c93-a9e1-2cbb530f3f95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '789f6123-167b-48dd-ae68-cfdbc1d5c78a', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1067.884151] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall 
[None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1067.884772] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1067.887036] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed92f589-712e-45ca-91e5-47012a1537aa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.910923] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1067.910923] env[69992]: value = "task-2897202" [ 1067.910923] env[69992]: _type = "Task" [ 1067.910923] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.920930] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897202, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.034610] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fcffef69-a507-46ad-b934-9c1b8c739ac9 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Acquiring lock "efa06ccc-be20-4d0e-938f-01c91ef4de8e" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.034916] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fcffef69-a507-46ad-b934-9c1b8c739ac9 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Lock "efa06ccc-be20-4d0e-938f-01c91ef4de8e" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.035264] env[69992]: INFO nova.compute.manager [None req-fcffef69-a507-46ad-b934-9c1b8c739ac9 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Rebooting instance [ 1068.181794] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.270276] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897201, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.271683] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.373083] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.322s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.374078] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.084s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.374386] env[69992]: DEBUG nova.objects.instance [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lazy-loading 'resources' on Instance uuid dd31269e-716c-44cd-9fc3-ce227fe5b3b2 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1068.399285] env[69992]: INFO nova.scheduler.client.report [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Deleted allocations for instance 73e41918-88b8-4ff7-9fdd-b45ac97c80ec [ 1068.405578] env[69992]: INFO nova.compute.manager [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Rebuilding instance [ 1068.423348] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897202, 'name': CreateVM_Task, 'duration_secs': 0.457058} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.427901] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1068.429323] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.429323] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.429636] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1068.429811] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7b44dda-a079-4b49-b60f-b2cf088348bb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.439709] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1068.439709] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]521d193c-c10d-3c37-690f-fb53e3cb780a" [ 1068.439709] env[69992]: _type = "Task" [ 1068.439709] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.452870] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521d193c-c10d-3c37-690f-fb53e3cb780a, 'name': SearchDatastore_Task, 'duration_secs': 0.011739} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.455736] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1068.456008] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1068.456262] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.456423] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.456610] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1068.457129] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91c33054-0603-497c-888b-fdbf2f5444d1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.466232] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1068.466418] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1068.469670] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a05bac8-14e8-443f-8955-cf426e5b4b3b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.475052] env[69992]: DEBUG nova.compute.manager [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1068.475892] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05b23a2-f689-4d9c-b059-c54573ab8514 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.479681] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1068.479681] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52aee074-3ff5-bea5-08e7-cfbc077e8dca" [ 1068.479681] env[69992]: _type = "Task" [ 1068.479681] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.493874] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52aee074-3ff5-bea5-08e7-cfbc077e8dca, 'name': SearchDatastore_Task, 'duration_secs': 0.011516} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.494676] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00e7bb71-4624-4586-8730-d38166773d91 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.499533] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1068.499533] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52737e02-bfa1-cb7b-3e4d-4aafeb17b820" [ 1068.499533] env[69992]: _type = "Task" [ 1068.499533] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.508957] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52737e02-bfa1-cb7b-3e4d-4aafeb17b820, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.559358] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fcffef69-a507-46ad-b934-9c1b8c739ac9 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Acquiring lock "refresh_cache-efa06ccc-be20-4d0e-938f-01c91ef4de8e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.559550] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fcffef69-a507-46ad-b934-9c1b8c739ac9 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Acquired lock "refresh_cache-efa06ccc-be20-4d0e-938f-01c91ef4de8e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.559730] env[69992]: DEBUG nova.network.neutron [None req-fcffef69-a507-46ad-b934-9c1b8c739ac9 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1068.565377] env[69992]: DEBUG nova.compute.manager [req-6ddb9a02-89c3-4987-a7b8-f7f402f3eafd req-34f9d7fa-904a-4a28-8f75-6d5fdcf72f40 service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Received event network-changed-789f6123-167b-48dd-ae68-cfdbc1d5c78a {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1068.565377] env[69992]: DEBUG nova.compute.manager [req-6ddb9a02-89c3-4987-a7b8-f7f402f3eafd req-34f9d7fa-904a-4a28-8f75-6d5fdcf72f40 service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Refreshing instance network info cache due to event network-changed-789f6123-167b-48dd-ae68-cfdbc1d5c78a. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1068.565377] env[69992]: DEBUG oslo_concurrency.lockutils [req-6ddb9a02-89c3-4987-a7b8-f7f402f3eafd req-34f9d7fa-904a-4a28-8f75-6d5fdcf72f40 service nova] Acquiring lock "refresh_cache-1b4da2ab-d026-45d8-8234-79ddd84d5cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.567317] env[69992]: DEBUG oslo_concurrency.lockutils [req-6ddb9a02-89c3-4987-a7b8-f7f402f3eafd req-34f9d7fa-904a-4a28-8f75-6d5fdcf72f40 service nova] Acquired lock "refresh_cache-1b4da2ab-d026-45d8-8234-79ddd84d5cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.567619] env[69992]: DEBUG nova.network.neutron [req-6ddb9a02-89c3-4987-a7b8-f7f402f3eafd req-34f9d7fa-904a-4a28-8f75-6d5fdcf72f40 service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Refreshing network info cache for port 789f6123-167b-48dd-ae68-cfdbc1d5c78a {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1068.768470] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897201, 'name': ReconfigVM_Task, 'duration_secs': 0.575827} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.768852] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Reconfigured VM instance instance-00000036 to attach disk [datastore1] fcbe1142-72dc-4a02-af9b-e03a2031a247/fcbe1142-72dc-4a02-af9b-e03a2031a247.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1068.769562] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a0ef962f-2601-4b5f-91fa-737e2ece62d2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.776908] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1068.776908] env[69992]: value = "task-2897203" [ 1068.776908] env[69992]: _type = "Task" [ 1068.776908] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.790007] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897203, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.798582] env[69992]: DEBUG nova.network.neutron [None req-fcffef69-a507-46ad-b934-9c1b8c739ac9 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Updating instance_info_cache with network_info: [{"id": "66d406e0-6f68-43e7-ab80-d030bf95c7bb", "address": "fa:16:3e:13:fa:bc", "network": {"id": "e41a3bc7-387a-4d51-b682-2bae9aa6bab0", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-579698461-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0183165a87814cc28924a3bf6c07f171", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a58387dd-f438-4913-af6a-fafb734cd881", "external-id": "nsx-vlan-transportzone-169", "segmentation_id": 169, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66d406e0-6f", "ovs_interfaceid": "66d406e0-6f68-43e7-ab80-d030bf95c7bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.877989] env[69992]: DEBUG nova.objects.instance [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lazy-loading 'numa_topology' on Instance uuid dd31269e-716c-44cd-9fc3-ce227fe5b3b2 
{{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1068.908205] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e06b2835-a694-4760-8865-45d4add041e4 tempest-ListImageFiltersTestJSON-706051381 tempest-ListImageFiltersTestJSON-706051381-project-member] Lock "73e41918-88b8-4ff7-9fdd-b45ac97c80ec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.043s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.938794] env[69992]: DEBUG oslo_vmware.rw_handles [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522bd276-456b-e9d7-88ed-02fb4a5c589e/disk-0.vmdk. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1068.939255] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de290032-e480-4be7-8964-a906bd3b8630 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.949527] env[69992]: DEBUG oslo_vmware.rw_handles [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522bd276-456b-e9d7-88ed-02fb4a5c589e/disk-0.vmdk is in state: ready. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1068.949527] env[69992]: ERROR oslo_vmware.rw_handles [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522bd276-456b-e9d7-88ed-02fb4a5c589e/disk-0.vmdk due to incomplete transfer. [ 1068.949527] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2e11735e-e6e0-4a61-8192-6ea2a6235614 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.955398] env[69992]: DEBUG oslo_vmware.rw_handles [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522bd276-456b-e9d7-88ed-02fb4a5c589e/disk-0.vmdk. 
{{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1068.955574] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Uploaded image 3e12f89e-654b-4bb0-ad8e-71e43e4eaf9a to the Glance image server {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1068.958660] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Destroying the VM {{(pid=69992) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1068.958660] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-543bf559-f464-4352-9ba8-c33561fc64f0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.966147] env[69992]: DEBUG oslo_vmware.api [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1068.966147] env[69992]: value = "task-2897204" [ 1068.966147] env[69992]: _type = "Task" [ 1068.966147] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.973093] env[69992]: DEBUG oslo_vmware.api [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897204, 'name': Destroy_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.014252] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52737e02-bfa1-cb7b-3e4d-4aafeb17b820, 'name': SearchDatastore_Task, 'duration_secs': 0.011745} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.014524] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1069.014781] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 1b4da2ab-d026-45d8-8234-79ddd84d5cbb/1b4da2ab-d026-45d8-8234-79ddd84d5cbb.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1069.015142] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a1209ab8-27a0-40ad-9f7c-aa0039904872 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.022229] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1069.022229] env[69992]: value = "task-2897205" [ 1069.022229] env[69992]: _type = "Task" [ 1069.022229] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.033498] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897205, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.288680] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897203, 'name': Rename_Task, 'duration_secs': 0.180214} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.289030] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1069.289292] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ef5569dc-d25c-49f9-8435-0321a24fc9b9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.296057] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1069.296057] env[69992]: value = "task-2897206" [ 1069.296057] env[69992]: _type = "Task" [ 1069.296057] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.301588] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fcffef69-a507-46ad-b934-9c1b8c739ac9 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Releasing lock "refresh_cache-efa06ccc-be20-4d0e-938f-01c91ef4de8e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1069.308924] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897206, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.350349] env[69992]: DEBUG nova.network.neutron [req-6ddb9a02-89c3-4987-a7b8-f7f402f3eafd req-34f9d7fa-904a-4a28-8f75-6d5fdcf72f40 service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Updated VIF entry in instance network info cache for port 789f6123-167b-48dd-ae68-cfdbc1d5c78a. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1069.350741] env[69992]: DEBUG nova.network.neutron [req-6ddb9a02-89c3-4987-a7b8-f7f402f3eafd req-34f9d7fa-904a-4a28-8f75-6d5fdcf72f40 service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Updating instance_info_cache with network_info: [{"id": "789f6123-167b-48dd-ae68-cfdbc1d5c78a", "address": "fa:16:3e:ed:f2:3c", "network": {"id": "e710c9b1-06e9-45f1-88fe-251001c4f54f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1116812669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8217315011854468b0cc17c4dfe342f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap789f6123-16", "ovs_interfaceid": "789f6123-167b-48dd-ae68-cfdbc1d5c78a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.381389] env[69992]: DEBUG nova.objects.base [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1069.479807] env[69992]: DEBUG oslo_vmware.api [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897204, 'name': Destroy_Task} progress is 33%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.496171] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1069.496517] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8d28116e-34b9-4561-b7a0-25c79d2822ad {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.507103] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1069.507103] env[69992]: value = "task-2897207" [ 1069.507103] env[69992]: _type = "Task" [ 1069.507103] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.516266] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897207, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.537802] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897205, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.807762] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897206, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.808242] env[69992]: DEBUG nova.compute.manager [None req-fcffef69-a507-46ad-b934-9c1b8c739ac9 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1069.811572] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4164d90-a464-4a98-845d-3adad1d7a2fa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.853200] env[69992]: DEBUG oslo_concurrency.lockutils [req-6ddb9a02-89c3-4987-a7b8-f7f402f3eafd req-34f9d7fa-904a-4a28-8f75-6d5fdcf72f40 service nova] Releasing lock "refresh_cache-1b4da2ab-d026-45d8-8234-79ddd84d5cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1069.853606] env[69992]: DEBUG nova.compute.manager [req-6ddb9a02-89c3-4987-a7b8-f7f402f3eafd req-34f9d7fa-904a-4a28-8f75-6d5fdcf72f40 service nova] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Received event network-vif-deleted-e64de32e-0e37-4777-91e7-8be0da0fa147 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1069.970796] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76688f36-4676-45d6-99ef-a6ee1ab586e8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.980483] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-216b198f-a808-4d88-bd27-167ef0bd1a42 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.983735] env[69992]: DEBUG oslo_vmware.api [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897204, 'name': Destroy_Task, 'duration_secs': 0.654907} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.983974] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Destroyed the VM [ 1069.984223] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Deleting Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1069.984751] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-df36b754-bfcd-436a-9dbf-235b3ab52426 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.016882] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679ca271-5d13-4c2c-b83f-5f2c2529e03e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.019303] env[69992]: DEBUG oslo_vmware.api [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1070.019303] env[69992]: value = "task-2897208" [ 1070.019303] env[69992]: _type = "Task" [ 1070.019303] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.029058] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897207, 'name': PowerOffVM_Task, 'duration_secs': 0.207051} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.033198] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2fe208-5b4e-423b-9f08-bfadff6dc54a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.037455] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1070.037759] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1070.041189] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd024c08-2d8e-487e-994d-82ce49b042b7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.044221] env[69992]: DEBUG oslo_vmware.api [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897208, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.049051] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897205, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.683384} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.058665] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 1b4da2ab-d026-45d8-8234-79ddd84d5cbb/1b4da2ab-d026-45d8-8234-79ddd84d5cbb.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1070.058907] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1070.059221] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1070.059706] env[69992]: DEBUG nova.compute.provider_tree [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1070.061317] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4e5c3741-fff5-43aa-b752-68d63e5acfce {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.063344] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-432913d0-3a3d-4c86-b001-684281a11d28 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.071564] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1070.071564] env[69992]: value = "task-2897209" [ 1070.071564] env[69992]: _type = "Task" [ 1070.071564] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.081201] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897209, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.133212] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1070.133212] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1070.133730] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Deleting the datastore file [datastore1] 27492ef7-8258-4001-b3b3-5bcb94e12c1f {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1070.133730] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b06ceb51-6b05-4577-91bb-47d76b49a29f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.140087] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1070.140087] env[69992]: value = "task-2897211" [ 1070.140087] env[69992]: _type = "Task" [ 1070.140087] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.149400] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897211, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.308544] env[69992]: DEBUG oslo_vmware.api [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897206, 'name': PowerOnVM_Task, 'duration_secs': 0.972234} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.308846] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1070.309675] env[69992]: INFO nova.compute.manager [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Took 8.80 seconds to spawn the instance on the hypervisor. 
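[editor's note] The "Acquiring lock ... by ...", "Lock ... acquired ... :: waited N.NNNs", and "Lock ... 'released' ... :: held N.NNNs" entries scattered through this section (for example around "compute_resources" and the per-instance build locks) are emitted by oslo.concurrency's lock wrapper. A minimal, hedged sketch of that pattern, assuming only that the work is wrapped with lockutils.synchronized; the function body here is a placeholder, not Nova's actual resource-tracker code:

```python
# Hedged sketch of the oslo.concurrency locking pattern behind the
# "Acquiring lock ... / acquired ... waited / released ... held" log entries.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')   # lock name as it appears in the log
def update_usage():
    # The decorator's inner() wrapper (lockutils.py, as cited in the log) logs
    # "Acquiring lock ... by ..." before entry, "acquired ... waited Ns" once the
    # lock is held, and "released ... held Ns" on exit.
    pass  # placeholder for the work done while holding the lock

update_usage()
```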
[ 1070.309675] env[69992]: DEBUG nova.compute.manager [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1070.310027] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198d3ccd-bef6-4081-94c4-63ec763eae6c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.530063] env[69992]: DEBUG oslo_vmware.api [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897208, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.582197] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897209, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.121378} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.583421] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1070.583421] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccfbd74e-4fd9-4db8-ada1-34ba01d52368 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.605554] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] 1b4da2ab-d026-45d8-8234-79ddd84d5cbb/1b4da2ab-d026-45d8-8234-79ddd84d5cbb.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1070.606730] env[69992]: ERROR nova.scheduler.client.report [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [req-04881819-370c-4ef7-a0ac-0f71a482d6ca] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-04881819-370c-4ef7-a0ac-0f71a482d6ca"}]} [ 1070.607094] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6f0127f-7215-48e4-8602-9d0989744e78 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.629684] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1070.629684] env[69992]: value = "task-2897212" [ 1070.629684] env[69992]: _type = "Task" [ 1070.629684] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.640602] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897212, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.641469] env[69992]: DEBUG nova.scheduler.client.report [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Refreshing inventories for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1070.653389] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897211, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.29031} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.653389] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1070.653389] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1070.653389] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1070.657262] env[69992]: DEBUG nova.scheduler.client.report [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Updating ProviderTree inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1070.657506] env[69992]: DEBUG nova.compute.provider_tree [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1070.669978] env[69992]: DEBUG nova.scheduler.client.report [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Refreshing aggregate associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, aggregates: None {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1070.701945] env[69992]: DEBUG nova.scheduler.client.report [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Refreshing trait associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=69992) 
_refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1070.737818] env[69992]: DEBUG oslo_concurrency.lockutils [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Acquiring lock "4609d6ce-9d5b-408d-8cb6-1baf76d85bb3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.737993] env[69992]: DEBUG oslo_concurrency.lockutils [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Lock "4609d6ce-9d5b-408d-8cb6-1baf76d85bb3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.829723] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebad2872-c478-4715-a860-eeb49ac4ee39 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.833211] env[69992]: INFO nova.compute.manager [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Took 48.74 seconds to build instance. [ 1070.839646] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fcffef69-a507-46ad-b934-9c1b8c739ac9 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Doing hard reboot of VM {{(pid=69992) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1070.842917] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-dc5413b4-ca15-401b-b52c-5f039b9ed5f9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.851725] env[69992]: DEBUG oslo_vmware.api [None req-fcffef69-a507-46ad-b934-9c1b8c739ac9 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Waiting for the task: (returnval){ [ 1070.851725] env[69992]: value = "task-2897213" [ 1070.851725] env[69992]: _type = "Task" [ 1070.851725] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.860717] env[69992]: DEBUG oslo_vmware.api [None req-fcffef69-a507-46ad-b934-9c1b8c739ac9 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': task-2897213, 'name': ResetVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.030572] env[69992]: DEBUG oslo_vmware.api [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897208, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.145865] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897212, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.238373] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e12a01f-d218-402d-9ef9-107137e86a3d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.247435] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6df586c-a80e-49b6-bf17-1a0ed17ba0f2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.281641] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41291f3e-c96b-45f7-8638-72b62f73e629 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.289189] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7f3eeb-8353-41c0-88c7-57be038181d8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.306019] env[69992]: DEBUG nova.compute.provider_tree [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1071.335655] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87a91bdb-df97-42c1-bbbd-f15e7006e97d tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "fcbe1142-72dc-4a02-af9b-e03a2031a247" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.364182] env[69992]: DEBUG oslo_vmware.api [None req-fcffef69-a507-46ad-b934-9c1b8c739ac9 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': task-2897213, 'name': ResetVM_Task, 'duration_secs': 0.108346} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.364182] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fcffef69-a507-46ad-b934-9c1b8c739ac9 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Did hard reboot of VM {{(pid=69992) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1071.364182] env[69992]: DEBUG nova.compute.manager [None req-fcffef69-a507-46ad-b934-9c1b8c739ac9 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1071.364182] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845dd197-bfc8-496a-b700-0cfdbabfcdf3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.531629] env[69992]: DEBUG oslo_vmware.api [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897208, 'name': RemoveSnapshot_Task, 'duration_secs': 1.234867} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.531924] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Deleted Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1071.532221] env[69992]: INFO nova.compute.manager [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Took 15.87 seconds to snapshot the instance on the hypervisor. [ 1071.640538] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897212, 'name': ReconfigVM_Task, 'duration_secs': 0.677551} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.640805] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Reconfigured VM instance instance-00000037 to attach disk [datastore2] 1b4da2ab-d026-45d8-8234-79ddd84d5cbb/1b4da2ab-d026-45d8-8234-79ddd84d5cbb.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1071.641470] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c42b1cbc-6334-474b-b81b-61e61a730ba9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.646928] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1071.646928] env[69992]: value = "task-2897214" [ 1071.646928] env[69992]: _type = "Task" [ 1071.646928] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.654431] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897214, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.684523] env[69992]: DEBUG nova.virt.hardware [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1071.685294] env[69992]: DEBUG nova.virt.hardware [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1071.685294] env[69992]: DEBUG nova.virt.hardware [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1071.685294] env[69992]: DEBUG nova.virt.hardware [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 
tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1071.685294] env[69992]: DEBUG nova.virt.hardware [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1071.685487] env[69992]: DEBUG nova.virt.hardware [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1071.685578] env[69992]: DEBUG nova.virt.hardware [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1071.685737] env[69992]: DEBUG nova.virt.hardware [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1071.685901] env[69992]: DEBUG nova.virt.hardware [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1071.686074] env[69992]: DEBUG nova.virt.hardware [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1071.686248] env[69992]: DEBUG nova.virt.hardware [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1071.687085] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0007577b-c349-453d-bc01-bf1f6d5eda75 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.694141] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efed0e0a-12e6-4775-bc19-b1ec24f92033 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.707253] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:85:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'ac2c9d07-ed01-47a9-88f1-562992bc1076', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '617fc6d5-b33e-407b-8a59-8a6def94c1f4', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1071.714755] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1071.714988] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1071.715206] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4be935b8-7a37-4e94-91b8-165e1b381355 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.732972] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1071.732972] env[69992]: value = "task-2897215" [ 1071.732972] env[69992]: _type = "Task" [ 1071.732972] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.740407] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897215, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.827854] env[69992]: ERROR nova.scheduler.client.report [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [req-6188c715-726d-4399-bb2f-78b51bccba3f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6188c715-726d-4399-bb2f-78b51bccba3f"}]} [ 1071.840647] env[69992]: DEBUG nova.compute.manager [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1071.848666] env[69992]: DEBUG nova.scheduler.client.report [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Refreshing inventories for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1071.869334] env[69992]: DEBUG nova.scheduler.client.report [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Updating ProviderTree inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1071.869584] env[69992]: DEBUG nova.compute.provider_tree [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1071.878909] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fcffef69-a507-46ad-b934-9c1b8c739ac9 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Lock "efa06ccc-be20-4d0e-938f-01c91ef4de8e" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.844s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.892763] env[69992]: DEBUG nova.scheduler.client.report [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Refreshing aggregate associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, aggregates: None {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1071.910686] env[69992]: DEBUG nova.scheduler.client.report [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Refreshing trait associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1072.082648] env[69992]: DEBUG nova.compute.manager [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 
tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Found 3 images (rotation: 2) {{(pid=69992) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1072.082759] env[69992]: DEBUG nova.compute.manager [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Rotating out 1 backups {{(pid=69992) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 1072.082929] env[69992]: DEBUG nova.compute.manager [None req-9cf5e133-05d2-4a45-bcb3-3c13053a4d60 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Deleting image 6534e8d5-5df3-44be-a620-28db421259d9 {{(pid=69992) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 1072.159823] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897214, 'name': Rename_Task, 'duration_secs': 0.244378} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.159823] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1072.160042] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0358e833-8100-4e5c-aa6e-eca38c0ece72 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.168688] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1072.168688] env[69992]: value = "task-2897216" [ 1072.168688] env[69992]: _type = "Task" [ 1072.168688] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.180609] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897216, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.248924] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897215, 'name': CreateVM_Task, 'duration_secs': 0.48219} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.248924] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1072.248924] env[69992]: DEBUG oslo_concurrency.lockutils [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.248924] env[69992]: DEBUG oslo_concurrency.lockutils [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1072.248924] env[69992]: DEBUG oslo_concurrency.lockutils [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1072.248924] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e41f5d57-ed51-4d88-a11d-9f9688078cb9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.253552] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1072.253552] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52968e03-6ed4-7032-926d-4e78e527498b" [ 1072.253552] env[69992]: _type = "Task" [ 1072.253552] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.268034] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52968e03-6ed4-7032-926d-4e78e527498b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.370620] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1072.423910] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c66e87-83b7-4a45-be3c-55224fc4fb71 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.432282] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d85191e-5a4b-4d1d-8d04-4da1c2e9c9bc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.465041] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d8b1bf-3755-4134-a116-4d78a683410a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.476060] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dddf7222-0ed2-4ec8-a960-ff7aab40489f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.490508] env[69992]: DEBUG nova.compute.provider_tree [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1072.681870] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897216, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.766289] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52968e03-6ed4-7032-926d-4e78e527498b, 'name': SearchDatastore_Task, 'duration_secs': 0.017199} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.766604] env[69992]: DEBUG oslo_concurrency.lockutils [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1072.766837] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1072.767087] env[69992]: DEBUG oslo_concurrency.lockutils [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.767237] env[69992]: DEBUG oslo_concurrency.lockutils [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1072.767416] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1072.767696] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-67d894d9-bf43-4d9b-847b-190992d6bbb0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.778478] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1072.779241] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1072.780840] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9146f18b-e551-4a8f-a35b-20f133143b3c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.786454] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "bce01d14-3c1b-4dce-b61c-721e25a56497" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1072.786684] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "bce01d14-3c1b-4dce-b61c-721e25a56497" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1072.791080] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1072.791080] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5212af9b-d891-9a9c-8bbd-b08ae6b2e681" [ 1072.791080] env[69992]: _type = "Task" [ 1072.791080] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.800410] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5212af9b-d891-9a9c-8bbd-b08ae6b2e681, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.013601] env[69992]: ERROR nova.scheduler.client.report [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [req-b2c95f8b-6716-4579-8fdc-1766e5534ee4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b2c95f8b-6716-4579-8fdc-1766e5534ee4"}]} [ 1073.030552] env[69992]: DEBUG nova.scheduler.client.report [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Refreshing inventories for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1073.043872] env[69992]: DEBUG nova.scheduler.client.report [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Updating ProviderTree inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1073.045148] env[69992]: DEBUG nova.compute.provider_tree [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1073.058281] env[69992]: DEBUG nova.scheduler.client.report [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Refreshing aggregate associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, aggregates: None {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1073.078981] env[69992]: DEBUG nova.scheduler.client.report [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Refreshing trait associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1073.182915] env[69992]: DEBUG oslo_vmware.api [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897216, 'name': PowerOnVM_Task, 'duration_secs': 0.736137} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.183444] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1073.183702] env[69992]: INFO nova.compute.manager [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Took 7.46 seconds to spawn the instance on the hypervisor. [ 1073.183946] env[69992]: DEBUG nova.compute.manager [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1073.184765] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009bfd1c-d568-499d-9549-063a0644a964 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.303411] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5212af9b-d891-9a9c-8bbd-b08ae6b2e681, 'name': SearchDatastore_Task, 'duration_secs': 0.011652} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.306694] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbd81fef-21be-45d9-a776-946358d42901 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.312648] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1073.312648] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]525a6bb9-e609-aa33-530d-81abf90189d0" [ 1073.312648] env[69992]: _type = "Task" [ 1073.312648] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.323609] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525a6bb9-e609-aa33-530d-81abf90189d0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.381594] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Acquiring lock "efa06ccc-be20-4d0e-938f-01c91ef4de8e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.381865] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Lock "efa06ccc-be20-4d0e-938f-01c91ef4de8e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.382092] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Acquiring lock "efa06ccc-be20-4d0e-938f-01c91ef4de8e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.382212] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Lock "efa06ccc-be20-4d0e-938f-01c91ef4de8e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.382380] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Lock "efa06ccc-be20-4d0e-938f-01c91ef4de8e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.384704] env[69992]: INFO nova.compute.manager [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Terminating instance [ 1073.571160] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-692ae3a8-b6ee-41bb-b840-ecf1ea452b5f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.579107] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80387a5f-e425-4755-b3f2-076badb73473 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.610857] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4dfa236-e0ab-430e-b5ec-6360592f9ed9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.619199] env[69992]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22bcdba2-58dc-47a0-8bec-108c08493a2f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.633207] env[69992]: DEBUG nova.compute.provider_tree [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1073.705595] env[69992]: INFO nova.compute.manager [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Took 49.84 seconds to build instance. [ 1073.824026] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525a6bb9-e609-aa33-530d-81abf90189d0, 'name': SearchDatastore_Task, 'duration_secs': 0.025057} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.824134] env[69992]: DEBUG oslo_concurrency.lockutils [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1073.824388] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 27492ef7-8258-4001-b3b3-5bcb94e12c1f/27492ef7-8258-4001-b3b3-5bcb94e12c1f.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1073.824647] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5991c82-82b1-4b5b-9491-f8ce25d64d6e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.833491] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1073.833491] env[69992]: value = "task-2897217" [ 1073.833491] env[69992]: _type = "Task" [ 1073.833491] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.842698] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897217, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.887738] env[69992]: DEBUG nova.compute.manager [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1073.888018] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1073.889924] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f228c1b-8a62-4747-97b9-9dbad2c5892d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.899741] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1073.900026] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d229576e-624d-4041-9e69-27d9bafa31c9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.908177] env[69992]: DEBUG oslo_vmware.api [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Waiting for the task: (returnval){ [ 1073.908177] env[69992]: value = "task-2897218" [ 1073.908177] env[69992]: _type = "Task" [ 1073.908177] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.917295] env[69992]: DEBUG oslo_vmware.api [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': task-2897218, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.168341] env[69992]: DEBUG nova.scheduler.client.report [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 89 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1074.168725] env[69992]: DEBUG nova.compute.provider_tree [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 89 to 90 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1074.168951] env[69992]: DEBUG nova.compute.provider_tree [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1074.207719] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9ede1fb9-9f91-40cf-ac71-324c0f2e7416 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "1b4da2ab-d026-45d8-8234-79ddd84d5cbb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.658s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.296589] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7980b062-93ad-45cb-b93c-e5d273fc8428 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "a7f01cd7-f148-48fc-a71a-5461672d6039" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.296916] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7980b062-93ad-45cb-b93c-e5d273fc8428 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "a7f01cd7-f148-48fc-a71a-5461672d6039" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1074.297212] env[69992]: DEBUG nova.compute.manager [None 
req-7980b062-93ad-45cb-b93c-e5d273fc8428 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1074.298295] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd056ed-e5cc-4a7e-bef1-80891a985cb8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.309572] env[69992]: DEBUG nova.compute.manager [None req-7980b062-93ad-45cb-b93c-e5d273fc8428 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69992) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1074.310588] env[69992]: DEBUG nova.objects.instance [None req-7980b062-93ad-45cb-b93c-e5d273fc8428 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lazy-loading 'flavor' on Instance uuid a7f01cd7-f148-48fc-a71a-5461672d6039 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1074.347934] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897217, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.421104] env[69992]: DEBUG oslo_vmware.api [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': task-2897218, 'name': PowerOffVM_Task, 'duration_secs': 0.189953} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.421591] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1074.421720] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1074.422030] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-35f11396-bb9e-42e0-bdb4-909f9fd90ff7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.498351] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1074.498885] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1074.498885] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Deleting the datastore file [datastore1] efa06ccc-be20-4d0e-938f-01c91ef4de8e {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1074.499157] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-83f97c3e-ce85-467b-ab5a-b2705aada315 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.506231] env[69992]: DEBUG oslo_vmware.api [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Waiting for the task: (returnval){ [ 1074.506231] env[69992]: value = "task-2897220" [ 1074.506231] env[69992]: _type = "Task" [ 1074.506231] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.515129] env[69992]: DEBUG oslo_vmware.api [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': task-2897220, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.676324] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 6.302s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.679062] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.963s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1074.680577] env[69992]: INFO nova.compute.claims [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1074.713292] env[69992]: DEBUG nova.compute.manager [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1074.845711] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897217, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.657636} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.847109] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 27492ef7-8258-4001-b3b3-5bcb94e12c1f/27492ef7-8258-4001-b3b3-5bcb94e12c1f.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1074.847324] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1074.848777] env[69992]: DEBUG nova.compute.manager [req-70374baa-573e-4242-ac10-6f3f81e9f228 req-04a94ef8-bcc1-48b9-b2a8-6916b52a5caa service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Received event network-changed-789f6123-167b-48dd-ae68-cfdbc1d5c78a {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1074.848964] env[69992]: DEBUG nova.compute.manager [req-70374baa-573e-4242-ac10-6f3f81e9f228 req-04a94ef8-bcc1-48b9-b2a8-6916b52a5caa service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Refreshing instance network info cache due to event network-changed-789f6123-167b-48dd-ae68-cfdbc1d5c78a. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1074.850335] env[69992]: DEBUG oslo_concurrency.lockutils [req-70374baa-573e-4242-ac10-6f3f81e9f228 req-04a94ef8-bcc1-48b9-b2a8-6916b52a5caa service nova] Acquiring lock "refresh_cache-1b4da2ab-d026-45d8-8234-79ddd84d5cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.850503] env[69992]: DEBUG oslo_concurrency.lockutils [req-70374baa-573e-4242-ac10-6f3f81e9f228 req-04a94ef8-bcc1-48b9-b2a8-6916b52a5caa service nova] Acquired lock "refresh_cache-1b4da2ab-d026-45d8-8234-79ddd84d5cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1074.850677] env[69992]: DEBUG nova.network.neutron [req-70374baa-573e-4242-ac10-6f3f81e9f228 req-04a94ef8-bcc1-48b9-b2a8-6916b52a5caa service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Refreshing network info cache for port 789f6123-167b-48dd-ae68-cfdbc1d5c78a {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1074.851881] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-985db1c8-9208-47cb-8005-1d27f24738c3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.862057] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1074.862057] env[69992]: value = "task-2897221" [ 1074.862057] env[69992]: _type = "Task" [ 1074.862057] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.873107] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897221, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.018407] env[69992]: DEBUG oslo_vmware.api [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Task: {'id': task-2897220, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164835} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.018674] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1075.019413] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1075.019413] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1075.019413] env[69992]: INFO nova.compute.manager [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1075.019567] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1075.019704] env[69992]: DEBUG nova.compute.manager [-] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1075.019801] env[69992]: DEBUG nova.network.neutron [-] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1075.192108] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c54eddeb-e06c-4497-87b3-94e7d293a6c6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 69.209s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.193089] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 42.778s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.193318] env[69992]: INFO nova.compute.manager [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Unshelving [ 1075.239051] env[69992]: DEBUG oslo_concurrency.lockutils [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.301469] env[69992]: DEBUG nova.compute.manager [req-9aab14f5-278a-40f1-aa36-fcf7ac43b777 req-ef5780d9-b760-401c-9906-7ff27663c190 service nova] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Received event network-vif-deleted-66d406e0-6f68-43e7-ab80-d030bf95c7bb {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1075.301724] env[69992]: INFO nova.compute.manager [req-9aab14f5-278a-40f1-aa36-fcf7ac43b777 req-ef5780d9-b760-401c-9906-7ff27663c190 service nova] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Neutron deleted interface 66d406e0-6f68-43e7-ab80-d030bf95c7bb; detaching it from the instance and deleting it from the info cache [ 1075.301846] env[69992]: DEBUG nova.network.neutron [req-9aab14f5-278a-40f1-aa36-fcf7ac43b777 req-ef5780d9-b760-401c-9906-7ff27663c190 service nova] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.321886] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7980b062-93ad-45cb-b93c-e5d273fc8428 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Powering off the VM {{(pid=69992) power_off_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1075.322358] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a16478fa-8dd5-49b5-8773-941ad624b355 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.334021] env[69992]: DEBUG oslo_vmware.api [None req-7980b062-93ad-45cb-b93c-e5d273fc8428 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1075.334021] env[69992]: value = "task-2897222" [ 1075.334021] env[69992]: _type = "Task" [ 1075.334021] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.343213] env[69992]: DEBUG oslo_vmware.api [None req-7980b062-93ad-45cb-b93c-e5d273fc8428 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897222, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.373268] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897221, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066349} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.373544] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1075.374347] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b40456b0-8715-4d99-9997-8fa8daf1dacb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.400110] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Reconfiguring VM instance instance-00000012 to attach disk [datastore2] 27492ef7-8258-4001-b3b3-5bcb94e12c1f/27492ef7-8258-4001-b3b3-5bcb94e12c1f.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1075.402857] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a2f5330-4d04-4fbc-878e-e53169d20ad0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.425987] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1075.425987] env[69992]: value = "task-2897223" [ 1075.425987] env[69992]: _type = "Task" [ 1075.425987] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.436420] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897223, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.648875] env[69992]: DEBUG nova.network.neutron [req-70374baa-573e-4242-ac10-6f3f81e9f228 req-04a94ef8-bcc1-48b9-b2a8-6916b52a5caa service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Updated VIF entry in instance network info cache for port 789f6123-167b-48dd-ae68-cfdbc1d5c78a. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1075.649371] env[69992]: DEBUG nova.network.neutron [req-70374baa-573e-4242-ac10-6f3f81e9f228 req-04a94ef8-bcc1-48b9-b2a8-6916b52a5caa service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Updating instance_info_cache with network_info: [{"id": "789f6123-167b-48dd-ae68-cfdbc1d5c78a", "address": "fa:16:3e:ed:f2:3c", "network": {"id": "e710c9b1-06e9-45f1-88fe-251001c4f54f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1116812669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8217315011854468b0cc17c4dfe342f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap789f6123-16", "ovs_interfaceid": "789f6123-167b-48dd-ae68-cfdbc1d5c78a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.783295] env[69992]: DEBUG nova.network.neutron [-] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.805643] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-88c32f3a-f2f2-452f-8475-d85cd5f74d08 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.818817] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe69615-4c57-4366-88fd-01327bd73c16 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.843700] env[69992]: DEBUG oslo_vmware.api [None req-7980b062-93ad-45cb-b93c-e5d273fc8428 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897222, 'name': PowerOffVM_Task, 'duration_secs': 0.306279} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.844373] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7980b062-93ad-45cb-b93c-e5d273fc8428 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1075.844373] env[69992]: DEBUG nova.compute.manager [None req-7980b062-93ad-45cb-b93c-e5d273fc8428 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1075.845163] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3157239c-b596-454f-ab9d-40ad9e644a1a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.873130] env[69992]: DEBUG nova.compute.manager [req-9aab14f5-278a-40f1-aa36-fcf7ac43b777 req-ef5780d9-b760-401c-9906-7ff27663c190 service nova] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Detach interface failed, port_id=66d406e0-6f68-43e7-ab80-d030bf95c7bb, reason: Instance efa06ccc-be20-4d0e-938f-01c91ef4de8e could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1075.953456] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897223, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.153808] env[69992]: DEBUG oslo_concurrency.lockutils [req-70374baa-573e-4242-ac10-6f3f81e9f228 req-04a94ef8-bcc1-48b9-b2a8-6916b52a5caa service nova] Releasing lock "refresh_cache-1b4da2ab-d026-45d8-8234-79ddd84d5cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1076.203337] env[69992]: DEBUG nova.compute.utils [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1076.268312] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3c7315-a120-48ed-ad79-6799f8ae7a1e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.279521] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a46d966a-1053-45cf-bc60-fb984608395c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.285641] env[69992]: INFO nova.compute.manager [-] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Took 1.27 seconds to deallocate network for instance. 
[ 1076.334176] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27246da2-8945-4b04-8744-693a4c089bda {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.341543] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de64320-efe9-440a-9762-9958be41b605 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.357353] env[69992]: DEBUG nova.compute.provider_tree [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1076.376582] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7980b062-93ad-45cb-b93c-e5d273fc8428 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "a7f01cd7-f148-48fc-a71a-5461672d6039" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.080s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.442092] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897223, 'name': ReconfigVM_Task, 'duration_secs': 0.779145} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.442523] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Reconfigured VM instance instance-00000012 to attach disk [datastore2] 27492ef7-8258-4001-b3b3-5bcb94e12c1f/27492ef7-8258-4001-b3b3-5bcb94e12c1f.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1076.443070] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-081d6b4f-9504-4521-b1a3-27eb8bec83d5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.450366] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1076.450366] env[69992]: value = "task-2897224" [ 1076.450366] env[69992]: _type = "Task" [ 1076.450366] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.459325] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897224, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.707117] env[69992]: INFO nova.virt.block_device [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Booting with volume 964072e4-b1a4-47ae-8221-dfb900c2f8b1 at /dev/sdb [ 1076.747136] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fc22a181-0960-4233-8a9a-b8cb5bdf15bd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.757253] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c6af3e-0204-4d70-a667-fb42ea2e4375 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.802130] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-95de2fd6-98ac-4705-a91e-937abce6bd03 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.811996] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6b6bb45-fdd2-477f-8d10-08ab4796626e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.856989] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.857971] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c8358e-4d72-4f7c-b963-9f031bf487ad {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.868669] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e7def2-ba99-4452-997e-c99e87746fe4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.878885] env[69992]: ERROR nova.scheduler.client.report [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [req-c12c7b7c-a9d4-4293-b86f-545b972b138a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c12c7b7c-a9d4-4293-b86f-545b972b138a"}]} [ 1076.885610] env[69992]: DEBUG nova.virt.block_device [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Updating existing volume attachment record: cfc44e65-d20b-483c-b6a0-643728b589ce {{(pid=69992) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1076.912580] env[69992]: DEBUG nova.scheduler.client.report [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Refreshing inventories for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1076.930047] env[69992]: DEBUG nova.scheduler.client.report [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Updating ProviderTree inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1076.930343] env[69992]: DEBUG nova.compute.provider_tree [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1076.942157] env[69992]: DEBUG nova.scheduler.client.report [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Refreshing aggregate associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, aggregates: None {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1076.958480] env[69992]: DEBUG nova.scheduler.client.report [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Refreshing trait associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE 
{{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1076.964133] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897224, 'name': Rename_Task, 'duration_secs': 0.375165} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.964410] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1076.964653] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f517043c-c35f-48c8-b4e4-fe8b30801a09 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.971797] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1076.971797] env[69992]: value = "task-2897225" [ 1076.971797] env[69992]: _type = "Task" [ 1076.971797] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.983483] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897225, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.482508] env[69992]: DEBUG oslo_vmware.api [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897225, 'name': PowerOnVM_Task, 'duration_secs': 0.504207} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.485332] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1077.485637] env[69992]: DEBUG nova.compute.manager [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1077.486836] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21a7c54b-4f19-46f6-943e-8aecbb3774fa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.523410] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3103dbc-2340-481a-ad07-e2e99b068abe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.532378] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2fb9ef0-b9df-4983-80e2-c4735565670f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.566914] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f0d97e-e5b8-43fe-9f38-36488a89f3cf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.575259] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e3eb7c-9898-4198-a8d6-0190ce525a32 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.591881] env[69992]: DEBUG nova.compute.provider_tree [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1077.789517] env[69992]: DEBUG nova.compute.manager [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Stashing vm_state: stopped {{(pid=69992) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1078.005945] env[69992]: DEBUG oslo_concurrency.lockutils [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.113550] env[69992]: ERROR nova.scheduler.client.report [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [req-f6d75294-e022-4c10-92ba-ca6767666d31] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f6d75294-e022-4c10-92ba-ca6767666d31"}]} [ 1078.131034] env[69992]: DEBUG nova.scheduler.client.report [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Refreshing inventories for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1078.145803] env[69992]: DEBUG nova.scheduler.client.report [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Updating ProviderTree inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1078.146116] env[69992]: DEBUG nova.compute.provider_tree [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1078.158067] env[69992]: DEBUG nova.scheduler.client.report [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Refreshing aggregate associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, aggregates: None {{(pid=69992) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1078.177507] env[69992]: DEBUG nova.scheduler.client.report [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Refreshing trait associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1078.311445] env[69992]: DEBUG oslo_concurrency.lockutils [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.670795] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e2d07a-9b0d-4965-b060-80f706530804 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.676588] env[69992]: INFO nova.compute.manager [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Rebuilding instance [ 1078.681794] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce72f8f6-218f-45f0-be65-695bfd3069dd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.715622] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e9a9ec-c9b9-4ab8-a881-eaa8cbbe3452 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.726991] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b5f051-1d03-4307-b00e-398f31b76bca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.741842] env[69992]: DEBUG nova.compute.provider_tree [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1078.749775] env[69992]: DEBUG nova.compute.manager [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1078.750695] env[69992]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb85f66-a645-4bbe-94a0-266fcf514c6a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.280604] env[69992]: DEBUG nova.scheduler.client.report [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 92 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1079.280903] env[69992]: DEBUG nova.compute.provider_tree [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 92 to 93 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1079.281169] env[69992]: DEBUG nova.compute.provider_tree [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1079.763803] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1079.764141] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a865bd03-98ff-4245-aefe-64629e29e5f8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.774410] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1079.774410] env[69992]: value = "task-2897230" [ 1079.774410] env[69992]: _type = "Task" [ 1079.774410] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.783647] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897230, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.786762] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.108s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.787148] env[69992]: DEBUG nova.compute.manager [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1079.789979] env[69992]: DEBUG oslo_concurrency.lockutils [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 45.223s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.790185] env[69992]: DEBUG nova.objects.instance [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69992) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1080.284282] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897230, 'name': PowerOffVM_Task, 'duration_secs': 0.213587} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.284572] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1080.284807] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1080.285605] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c015b0f8-6271-48cb-b909-b4e89d2fa09b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.292838] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1080.293124] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-721c2fdc-4025-48ce-a331-f63f660df0da {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.295677] env[69992]: DEBUG nova.compute.utils [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1080.299485] env[69992]: DEBUG nova.compute.manager [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1080.299654] env[69992]: DEBUG nova.network.neutron [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1080.339610] env[69992]: DEBUG nova.policy [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '312fc29022994913825ecfd425fcee2d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea0f9171bc5c4034b8dbe9100bd6e007', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1080.375070] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1080.375194] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1080.375428] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Deleting the datastore file [datastore2] 27492ef7-8258-4001-b3b3-5bcb94e12c1f {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1080.375714] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2e7156b6-a074-43ea-9987-29a2246fb7b8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.384912] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1080.384912] env[69992]: value = "task-2897232" [ 1080.384912] env[69992]: _type = "Task" [ 1080.384912] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.395800] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897232, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.643265] env[69992]: DEBUG nova.network.neutron [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Successfully created port: caeee1e6-2d7b-48fe-afa7-7b1525a95c86 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1080.803367] env[69992]: DEBUG nova.compute.utils [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1080.805545] env[69992]: DEBUG oslo_concurrency.lockutils [None req-be5a3fa1-7947-4b9b-8591-1791283991c6 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.806531] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.842s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1080.806754] env[69992]: DEBUG nova.objects.instance [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lazy-loading 'resources' on Instance uuid 714fafbf-a765-4e2c-8633-997d8244483c {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1080.897383] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897232, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189175} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.897643] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1080.897825] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1080.897999] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1081.309615] env[69992]: DEBUG nova.compute.manager [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1081.766590] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af978523-5ef0-4989-89ef-fc56cd8f36d7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.775353] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cdfb79d-6e2b-40f5-94fa-888ea98a9bcb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.804577] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f905628-7564-4d3d-b01e-8fae30b65a99 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.813469] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0298326-94b2-4b78-be63-a9f5f162df25 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.830430] env[69992]: DEBUG nova.compute.provider_tree [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1081.942299] env[69992]: DEBUG nova.virt.hardware [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 
tempest-ServersAdminTestJSON-40334490-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1081.942646] env[69992]: DEBUG nova.virt.hardware [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1081.942867] env[69992]: DEBUG nova.virt.hardware [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1081.943912] env[69992]: DEBUG nova.virt.hardware [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1081.944329] env[69992]: DEBUG nova.virt.hardware [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1081.944574] env[69992]: DEBUG nova.virt.hardware [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1081.944824] env[69992]: DEBUG nova.virt.hardware [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1081.944991] env[69992]: DEBUG nova.virt.hardware [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1081.945183] env[69992]: DEBUG nova.virt.hardware [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1081.945351] env[69992]: 
DEBUG nova.virt.hardware [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1081.945784] env[69992]: DEBUG nova.virt.hardware [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1081.946661] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1baa81c-fc42-49ec-810e-f0b97a6dcfdb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.963441] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9772cfe4-c369-418c-b4da-7530c387916a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.979318] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:85:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac2c9d07-ed01-47a9-88f1-562992bc1076', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '617fc6d5-b33e-407b-8a59-8a6def94c1f4', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1081.988090] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1081.990585] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1081.991039] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-98a1aa35-5f69-4c18-9768-cd53f8050c46 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.015999] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1082.015999] env[69992]: value = "task-2897233" [ 1082.015999] env[69992]: _type = "Task" [ 1082.015999] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.024991] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897233, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.155623] env[69992]: DEBUG nova.compute.manager [req-7b30ec32-f066-4ebd-b91e-32d6237bdff8 req-e2cbecb5-2fa2-48b1-8b07-dda10a102cd8 service nova] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Received event network-vif-plugged-caeee1e6-2d7b-48fe-afa7-7b1525a95c86 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1082.158560] env[69992]: DEBUG oslo_concurrency.lockutils [req-7b30ec32-f066-4ebd-b91e-32d6237bdff8 req-e2cbecb5-2fa2-48b1-8b07-dda10a102cd8 service nova] Acquiring lock "4cd9fb91-44f1-4304-a2bf-c8b294b19e0e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.158560] env[69992]: DEBUG oslo_concurrency.lockutils [req-7b30ec32-f066-4ebd-b91e-32d6237bdff8 req-e2cbecb5-2fa2-48b1-8b07-dda10a102cd8 service nova] Lock "4cd9fb91-44f1-4304-a2bf-c8b294b19e0e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.158560] env[69992]: DEBUG oslo_concurrency.lockutils [req-7b30ec32-f066-4ebd-b91e-32d6237bdff8 req-e2cbecb5-2fa2-48b1-8b07-dda10a102cd8 service nova] Lock "4cd9fb91-44f1-4304-a2bf-c8b294b19e0e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.158560] env[69992]: DEBUG nova.compute.manager [req-7b30ec32-f066-4ebd-b91e-32d6237bdff8 req-e2cbecb5-2fa2-48b1-8b07-dda10a102cd8 service nova] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] No waiting events found dispatching network-vif-plugged-caeee1e6-2d7b-48fe-afa7-7b1525a95c86 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1082.158560] env[69992]: WARNING nova.compute.manager [req-7b30ec32-f066-4ebd-b91e-32d6237bdff8 req-e2cbecb5-2fa2-48b1-8b07-dda10a102cd8 service nova] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Received unexpected event network-vif-plugged-caeee1e6-2d7b-48fe-afa7-7b1525a95c86 for instance with vm_state building and task_state spawning. [ 1082.161247] env[69992]: DEBUG nova.network.neutron [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Successfully updated port: caeee1e6-2d7b-48fe-afa7-7b1525a95c86 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1082.325314] env[69992]: DEBUG nova.compute.manager [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1082.349113] env[69992]: DEBUG nova.virt.hardware [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:45:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='327824600',id=19,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-1943776578',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1082.349404] env[69992]: DEBUG nova.virt.hardware [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1082.349566] env[69992]: DEBUG nova.virt.hardware [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1082.349771] env[69992]: DEBUG nova.virt.hardware [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1082.349937] env[69992]: DEBUG nova.virt.hardware [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1082.350112] env[69992]: DEBUG nova.virt.hardware [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1082.350424] env[69992]: DEBUG nova.virt.hardware [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1082.350616] env[69992]: DEBUG nova.virt.hardware [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 
tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1082.350796] env[69992]: DEBUG nova.virt.hardware [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1082.350994] env[69992]: DEBUG nova.virt.hardware [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1082.351235] env[69992]: DEBUG nova.virt.hardware [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1082.352162] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08141540-3ea2-4383-b9ba-192b1c757e64 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.361449] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7880e6b9-b12b-4a73-9ebf-34876c981b87 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.367785] env[69992]: DEBUG nova.scheduler.client.report [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 93 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1082.368093] env[69992]: DEBUG nova.compute.provider_tree [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 93 to 94 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1082.368251] env[69992]: DEBUG nova.compute.provider_tree [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1082.508230] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.526683] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897233, 'name': CreateVM_Task, 'duration_secs': 0.367088} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.526860] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1082.527564] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.527732] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.528062] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1082.528321] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43cae2e3-9085-4256-9749-69450f269781 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.533548] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1082.533548] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]523d4d6b-5169-eacf-5943-fa6bf85c372d" [ 1082.533548] env[69992]: _type = "Task" [ 1082.533548] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.542639] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523d4d6b-5169-eacf-5943-fa6bf85c372d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.664091] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquiring lock "refresh_cache-4cd9fb91-44f1-4304-a2bf-c8b294b19e0e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.664261] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquired lock "refresh_cache-4cd9fb91-44f1-4304-a2bf-c8b294b19e0e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.664419] env[69992]: DEBUG nova.network.neutron [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1082.883225] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.076s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.885690] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.437s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.887189] env[69992]: INFO nova.compute.claims [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1082.900259] env[69992]: INFO nova.scheduler.client.report [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Deleted allocations for instance 714fafbf-a765-4e2c-8633-997d8244483c [ 1083.044107] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523d4d6b-5169-eacf-5943-fa6bf85c372d, 'name': SearchDatastore_Task, 'duration_secs': 0.009998} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.044416] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.044658] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1083.044896] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.045066] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.045252] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1083.045581] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5459b40b-f24c-4d3a-b34c-0c52f762e430 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.055281] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1083.055498] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1083.056171] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d68a7d4-b2a1-4301-af5f-d3eb3627477e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.061566] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1083.061566] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]521a19eb-e101-b7ef-3fb1-2af29744a636" [ 1083.061566] env[69992]: _type = "Task" [ 1083.061566] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.069367] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521a19eb-e101-b7ef-3fb1-2af29744a636, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.205795] env[69992]: DEBUG nova.network.neutron [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1083.353229] env[69992]: DEBUG nova.network.neutron [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Updating instance_info_cache with network_info: [{"id": "caeee1e6-2d7b-48fe-afa7-7b1525a95c86", "address": "fa:16:3e:e7:8e:9a", "network": {"id": "b6ad7b53-72db-475c-a28d-2b0c8da19818", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1429555593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea0f9171bc5c4034b8dbe9100bd6e007", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcaeee1e6-2d", "ovs_interfaceid": "caeee1e6-2d7b-48fe-afa7-7b1525a95c86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.407859] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b1294bd6-6dc1-46ec-850f-1054f7ee022e tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock 
"714fafbf-a765-4e2c-8633-997d8244483c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.629s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.573631] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521a19eb-e101-b7ef-3fb1-2af29744a636, 'name': SearchDatastore_Task, 'duration_secs': 0.008828} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.574454] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-181efe8c-8314-45b6-bd7b-5cd9a80e6bf4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.580967] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1083.580967] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52112984-6051-73e4-49c4-49800b6e4c23" [ 1083.580967] env[69992]: _type = "Task" [ 1083.580967] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.589758] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52112984-6051-73e4-49c4-49800b6e4c23, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.856568] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Releasing lock "refresh_cache-4cd9fb91-44f1-4304-a2bf-c8b294b19e0e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.856929] env[69992]: DEBUG nova.compute.manager [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Instance network_info: |[{"id": "caeee1e6-2d7b-48fe-afa7-7b1525a95c86", "address": "fa:16:3e:e7:8e:9a", "network": {"id": "b6ad7b53-72db-475c-a28d-2b0c8da19818", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1429555593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea0f9171bc5c4034b8dbe9100bd6e007", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcaeee1e6-2d", "ovs_interfaceid": "caeee1e6-2d7b-48fe-afa7-7b1525a95c86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1083.857405] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:8e:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4a2b284a-a29c-478f-b763-c9b5821e20ec', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'caeee1e6-2d7b-48fe-afa7-7b1525a95c86', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1083.865270] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1083.865512] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1083.865704] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7e1107c1-387f-47b2-95e4-514bdcdd9b01 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.886742] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1083.886742] env[69992]: value = "task-2897234" [ 1083.886742] env[69992]: _type = "Task" [ 1083.886742] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.898845] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897234, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.094602] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52112984-6051-73e4-49c4-49800b6e4c23, 'name': SearchDatastore_Task, 'duration_secs': 0.010961} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.097451] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.097731] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 27492ef7-8258-4001-b3b3-5bcb94e12c1f/27492ef7-8258-4001-b3b3-5bcb94e12c1f.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1084.098216] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad6c0bda-7cbf-4716-8657-a956c85b704b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.108099] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1084.108099] env[69992]: value = "task-2897235" [ 1084.108099] env[69992]: _type = "Task" [ 1084.108099] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.119560] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897235, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.189598] env[69992]: DEBUG nova.compute.manager [req-f0914de5-f142-41bd-bdae-1870d5cd8c4b req-7bb62b6b-3261-4e05-b258-4fad7aa0490f service nova] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Received event network-changed-caeee1e6-2d7b-48fe-afa7-7b1525a95c86 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1084.189802] env[69992]: DEBUG nova.compute.manager [req-f0914de5-f142-41bd-bdae-1870d5cd8c4b req-7bb62b6b-3261-4e05-b258-4fad7aa0490f service nova] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Refreshing instance network info cache due to event network-changed-caeee1e6-2d7b-48fe-afa7-7b1525a95c86. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1084.190063] env[69992]: DEBUG oslo_concurrency.lockutils [req-f0914de5-f142-41bd-bdae-1870d5cd8c4b req-7bb62b6b-3261-4e05-b258-4fad7aa0490f service nova] Acquiring lock "refresh_cache-4cd9fb91-44f1-4304-a2bf-c8b294b19e0e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.190153] env[69992]: DEBUG oslo_concurrency.lockutils [req-f0914de5-f142-41bd-bdae-1870d5cd8c4b req-7bb62b6b-3261-4e05-b258-4fad7aa0490f service nova] Acquired lock "refresh_cache-4cd9fb91-44f1-4304-a2bf-c8b294b19e0e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1084.190316] env[69992]: DEBUG nova.network.neutron [req-f0914de5-f142-41bd-bdae-1870d5cd8c4b req-7bb62b6b-3261-4e05-b258-4fad7aa0490f service nova] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Refreshing network info cache for port caeee1e6-2d7b-48fe-afa7-7b1525a95c86 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1084.400358] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897234, 'name': CreateVM_Task, 'duration_secs': 0.313719} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.403484] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1084.404538] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.404766] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1084.405197] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1084.405519] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03cc996e-05cf-4e5a-9d66-b4224b099ef0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.412217] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for the task: (returnval){ [ 1084.412217] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52125a46-cece-bbc9-9d99-3fe316bcd5bd" [ 1084.412217] env[69992]: _type = "Task" [ 1084.412217] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.416962] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-327a4ed0-c06a-408f-bf4c-f5b7dedfc414 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.429052] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a57e6f-7bda-4eb1-b5e4-aecb7daf5d16 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.432778] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52125a46-cece-bbc9-9d99-3fe316bcd5bd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.462573] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ad4b4b-e1d4-4e81-8ff2-bee02f31ef00 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.471447] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e07cdf9-360d-40e7-8004-ab8cc8d3940c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.486686] env[69992]: DEBUG nova.compute.provider_tree [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1084.622058] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897235, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458439} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.622058] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 27492ef7-8258-4001-b3b3-5bcb94e12c1f/27492ef7-8258-4001-b3b3-5bcb94e12c1f.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1084.622605] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1084.622874] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-89ca8f93-19b7-4b61-a78e-ecac73d5d21c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.629815] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1084.629815] env[69992]: value = "task-2897236" [ 1084.629815] env[69992]: _type = "Task" [ 1084.629815] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.639713] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897236, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.877458] env[69992]: DEBUG nova.network.neutron [req-f0914de5-f142-41bd-bdae-1870d5cd8c4b req-7bb62b6b-3261-4e05-b258-4fad7aa0490f service nova] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Updated VIF entry in instance network info cache for port caeee1e6-2d7b-48fe-afa7-7b1525a95c86. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1084.877829] env[69992]: DEBUG nova.network.neutron [req-f0914de5-f142-41bd-bdae-1870d5cd8c4b req-7bb62b6b-3261-4e05-b258-4fad7aa0490f service nova] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Updating instance_info_cache with network_info: [{"id": "caeee1e6-2d7b-48fe-afa7-7b1525a95c86", "address": "fa:16:3e:e7:8e:9a", "network": {"id": "b6ad7b53-72db-475c-a28d-2b0c8da19818", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1429555593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea0f9171bc5c4034b8dbe9100bd6e007", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcaeee1e6-2d", "ovs_interfaceid": "caeee1e6-2d7b-48fe-afa7-7b1525a95c86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.925100] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52125a46-cece-bbc9-9d99-3fe316bcd5bd, 'name': SearchDatastore_Task, 'duration_secs': 0.063244} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.925462] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.925697] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1084.925942] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.926077] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1084.926257] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1084.926511] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0bb9e64e-2a25-46c3-9b59-41f2eca1906a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.947189] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1084.947406] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1084.948141] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-602faa42-7536-4ca4-a617-1331b27f9e6f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.954398] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for the task: (returnval){ [ 1084.954398] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52c21c2d-48e9-5bf6-4950-76f0e6617bd7" [ 1084.954398] env[69992]: _type = "Task" [ 1084.954398] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.961948] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c21c2d-48e9-5bf6-4950-76f0e6617bd7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.991140] env[69992]: DEBUG nova.scheduler.client.report [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1085.140408] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897236, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071818} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.140688] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1085.141467] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4c0139-88e9-4060-a0ef-4d1d0844b5be {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.163988] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 27492ef7-8258-4001-b3b3-5bcb94e12c1f/27492ef7-8258-4001-b3b3-5bcb94e12c1f.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1085.164228] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1056431b-7f54-4c20-97c5-698c40eefaf5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.183212] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1085.183212] env[69992]: value = "task-2897237" [ 1085.183212] env[69992]: _type = "Task" [ 1085.183212] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.190498] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897237, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.380797] env[69992]: DEBUG oslo_concurrency.lockutils [req-f0914de5-f142-41bd-bdae-1870d5cd8c4b req-7bb62b6b-3261-4e05-b258-4fad7aa0490f service nova] Releasing lock "refresh_cache-4cd9fb91-44f1-4304-a2bf-c8b294b19e0e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1085.466073] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c21c2d-48e9-5bf6-4950-76f0e6617bd7, 'name': SearchDatastore_Task, 'duration_secs': 0.011882} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.466798] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be2aa92a-8da7-45a9-b88a-69d114e785a3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.473031] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for the task: (returnval){ [ 1085.473031] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52593ee9-5e62-0f7b-f368-66d3e245395c" [ 1085.473031] env[69992]: _type = "Task" [ 1085.473031] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.480749] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52593ee9-5e62-0f7b-f368-66d3e245395c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.497561] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.611s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.497731] env[69992]: DEBUG nova.compute.manager [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1085.500618] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 43.705s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.502409] env[69992]: INFO nova.compute.claims [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1085.694022] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897237, 'name': ReconfigVM_Task, 'duration_secs': 0.337517} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.694022] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 27492ef7-8258-4001-b3b3-5bcb94e12c1f/27492ef7-8258-4001-b3b3-5bcb94e12c1f.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1085.694604] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-026fc9cf-ab44-4a4f-896c-c36d4f7d9acc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.702707] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1085.702707] env[69992]: value = "task-2897238" [ 1085.702707] env[69992]: _type = "Task" [ 1085.702707] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.711597] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897238, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.983706] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52593ee9-5e62-0f7b-f368-66d3e245395c, 'name': SearchDatastore_Task, 'duration_secs': 0.020786} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.984018] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1085.984377] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e/4cd9fb91-44f1-4304-a2bf-c8b294b19e0e.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1085.984683] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-63caffc5-6bb5-4cf0-953c-819f9f711196 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.992297] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for the task: (returnval){ [ 1085.992297] env[69992]: value = "task-2897239" [ 1085.992297] env[69992]: _type = "Task" [ 1085.992297] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.001833] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2897239, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.007228] env[69992]: DEBUG nova.compute.utils [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1086.010485] env[69992]: DEBUG nova.compute.manager [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Not allocating networking since 'none' was specified. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1086.214735] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897238, 'name': Rename_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.502425] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2897239, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.456298} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.502624] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e/4cd9fb91-44f1-4304-a2bf-c8b294b19e0e.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1086.502834] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1086.503103] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-da035890-63ef-4fea-a4fa-8e8a425e7763 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.510054] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for the task: (returnval){ [ 1086.510054] env[69992]: value = "task-2897240" [ 1086.510054] env[69992]: _type = "Task" [ 1086.510054] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.513340] env[69992]: DEBUG nova.compute.manager [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1086.524717] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2897240, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.713497] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897238, 'name': Rename_Task, 'duration_secs': 0.895561} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.713497] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1086.713649] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b60febf4-bb2d-4a64-8742-279e496abed8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.721487] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1086.721487] env[69992]: value = "task-2897241" [ 1086.721487] env[69992]: _type = "Task" [ 1086.721487] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.733873] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897241, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.991598] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58029740-c89e-4523-a974-c100cd0ea079 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.000791] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-955238cb-ef68-45e5-ac0f-9c7c44bb6c33 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.035798] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4805b12c-ef0e-4396-bf87-9d8ac164d2e0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.046114] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2897240, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06437} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.047888] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1087.048711] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251be565-83d5-44da-97ba-b991bd087710 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.052438] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43eb8b74-7604-4bde-9016-3861e5f3e520 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.077061] env[69992]: DEBUG nova.compute.provider_tree [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1087.087818] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e/4cd9fb91-44f1-4304-a2bf-c8b294b19e0e.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1087.089362] env[69992]: DEBUG nova.scheduler.client.report [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1087.092566] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29f7f232-5f7a-4b6c-852e-832762081c74 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.107985] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.607s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.108559] env[69992]: DEBUG nova.compute.manager [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 
tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1087.111110] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.040s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.111351] env[69992]: DEBUG nova.objects.instance [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lazy-loading 'resources' on Instance uuid 2b1a0943-d59a-441d-a2e6-8149106803b6 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1087.122018] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for the task: (returnval){ [ 1087.122018] env[69992]: value = "task-2897242" [ 1087.122018] env[69992]: _type = "Task" [ 1087.122018] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.129590] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2897242, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.233601] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897241, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.539971] env[69992]: DEBUG nova.compute.manager [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1087.568544] env[69992]: DEBUG nova.virt.hardware [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1087.568851] env[69992]: DEBUG nova.virt.hardware [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1087.569093] env[69992]: DEBUG nova.virt.hardware [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1087.569378] env[69992]: DEBUG nova.virt.hardware [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1087.569552] env[69992]: DEBUG nova.virt.hardware [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1087.569705] env[69992]: DEBUG nova.virt.hardware [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1087.570048] env[69992]: DEBUG nova.virt.hardware [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1087.570323] env[69992]: DEBUG nova.virt.hardware [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1087.570571] env[69992]: DEBUG nova.virt.hardware [None 
req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1087.570833] env[69992]: DEBUG nova.virt.hardware [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1087.571080] env[69992]: DEBUG nova.virt.hardware [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1087.571970] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b588c0c2-b30a-4962-9b0d-b40e323e359a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.581238] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa0b5bf-b928-4e6d-b58f-2c620b9a337e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.595266] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Instance VIF info [] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1087.600798] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Creating folder: Project (d5c4153909ce49dfae0a89156d25c156). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1087.601121] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eabd21e0-e52e-493b-a6eb-7cc0a2ac174a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.613865] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Created folder: Project (d5c4153909ce49dfae0a89156d25c156) in parent group-v581821. [ 1087.614194] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Creating folder: Instances. Parent ref: group-v581993. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1087.615708] env[69992]: DEBUG nova.compute.utils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1087.620056] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a38676a2-4dec-4cd4-aeb2-3f4071d2a5ff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.622120] env[69992]: DEBUG nova.compute.manager [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1087.622551] env[69992]: DEBUG nova.network.neutron [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1087.635244] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2897242, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.637686] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Created folder: Instances in parent group-v581993. [ 1087.637957] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1087.638169] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1087.638388] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6081c711-5474-48cf-8e36-72e2cbc96c33 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.659406] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1087.659406] env[69992]: value = "task-2897245" [ 1087.659406] env[69992]: _type = "Task" [ 1087.659406] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.663809] env[69992]: DEBUG nova.policy [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94516a830322454c90c08043118e547a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6090a2d03daf46e9b687d24fde64fb72', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1087.674737] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897245, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.736043] env[69992]: DEBUG oslo_vmware.api [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897241, 'name': PowerOnVM_Task, 'duration_secs': 0.86636} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.736345] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1087.736553] env[69992]: DEBUG nova.compute.manager [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1087.737369] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4866e50-a4a1-4215-be08-f08a85394098 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.957629] env[69992]: DEBUG nova.network.neutron [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Successfully created port: 64c65906-7b18-4487-a141-432f0ac29177 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1088.122646] env[69992]: DEBUG nova.compute.manager [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1088.137535] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2897242, 'name': ReconfigVM_Task, 'duration_secs': 0.55652} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.137817] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e/4cd9fb91-44f1-4304-a2bf-c8b294b19e0e.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1088.138154] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=69992) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 1088.138820] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-d6da1048-2d40-4a44-8f08-f0a8a76d7ebc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.151026] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for the task: (returnval){ [ 1088.151026] env[69992]: value = "task-2897246" [ 1088.151026] env[69992]: _type = "Task" [ 1088.151026] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.164250] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2897246, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.177063] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897245, 'name': CreateVM_Task, 'duration_secs': 0.317022} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.177962] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1088.178444] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.178605] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.178964] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1088.179481] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e8fb49b-ca0e-4c13-a3f6-0d8b2762b992 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.188165] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for the task: (returnval){ [ 1088.188165] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5242cd69-0727-2288-00d2-ff494d3fc8c6" [ 1088.188165] env[69992]: _type = "Task" [ 1088.188165] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.195583] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-652f3171-4b4a-4a80-b912-329b3431d889 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.205228] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5242cd69-0727-2288-00d2-ff494d3fc8c6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.207671] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f533832a-5c52-4f68-bb34-019d1fed4642 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.239978] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d2f810-ee02-4aa7-ae1f-8578889a7689 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.248187] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0eae676-d192-4a99-a52e-a889fcf71da3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.262285] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.270794] env[69992]: DEBUG nova.compute.provider_tree [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1088.657576] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2897246, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.052944} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.657833] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=69992) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 1088.658613] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27037727-693e-4773-b107-5267c94be410 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.683444] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e/ephemeral_0.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1088.684491] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a6b8073-ddad-4ce9-aa19-973ca619f9a3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.709809] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5242cd69-0727-2288-00d2-ff494d3fc8c6, 'name': SearchDatastore_Task, 'duration_secs': 0.009729} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.711602] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1088.712069] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1088.712069] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.712674] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.712674] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1088.712935] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for the task: (returnval){ [ 1088.712935] env[69992]: value = "task-2897247" [ 1088.712935] env[69992]: _type = "Task" [ 1088.712935] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.713185] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ddf998d-5927-41e0-ac9e-21f04c512a12 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.725657] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2897247, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.726928] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1088.727118] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1088.728020] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30eb774b-6242-400b-870c-d364845f9647 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.733323] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for the task: (returnval){ [ 1088.733323] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a8db3c-a569-c46e-484c-933566b50f6e" [ 1088.733323] env[69992]: _type = "Task" [ 1088.733323] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.743293] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a8db3c-a569-c46e-484c-933566b50f6e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.776299] env[69992]: DEBUG nova.scheduler.client.report [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1089.138070] env[69992]: DEBUG nova.compute.manager [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Start spawning the instance on the hypervisor. 
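Annotation (not part of the captured trace): the repeated "Waiting for the task ... to complete" / "_poll_task ... progress is N%" pairs above come from oslo.vmware's task handling around vSphere `*_Task` calls. Below is a minimal sketch of that pattern; the endpoint, credentials, and the power-on call are illustrative placeholders, not values from this run, and a real vCenter is required for it to do anything.

```python
# Illustrative sketch of the invoke_api()/wait_for_task() pattern visible in
# the log above. Requires oslo.vmware and a reachable vCenter; every
# connection detail below is a placeholder.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',    # hypothetical endpoint/credentials
    api_retry_count=10, task_poll_interval=0.5)

# Look up some VirtualMachine managed-object references (compare the repeated
# PropertyCollector.RetrievePropertiesEx invocations in the trace).
result = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'VirtualMachine', 100, ['name'])
vm_ref = result.objects[0].obj              # assumes at least one VM exists

# Any vSphere method ending in *_Task returns a task reference; wait_for_task()
# polls it (the "progress is N%" style updates) until SUCCESS or an error.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)
```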
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1089.165091] env[69992]: DEBUG nova.virt.hardware [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1089.165336] env[69992]: DEBUG nova.virt.hardware [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1089.165494] env[69992]: DEBUG nova.virt.hardware [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1089.165680] env[69992]: DEBUG nova.virt.hardware [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1089.165830] env[69992]: DEBUG nova.virt.hardware [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1089.165980] env[69992]: DEBUG nova.virt.hardware [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1089.166211] env[69992]: DEBUG nova.virt.hardware [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1089.166371] env[69992]: DEBUG nova.virt.hardware [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1089.166540] env[69992]: DEBUG nova.virt.hardware [None 
req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1089.166703] env[69992]: DEBUG nova.virt.hardware [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1089.166878] env[69992]: DEBUG nova.virt.hardware [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1089.167798] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37288a4d-5580-4fd9-bd4e-da57038d2d99 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.176701] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add0740e-2c55-45be-bef1-b7c98564335f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.210865] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "673be00f-e3c5-4a54-beeb-cf89828e9e32" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.211190] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "673be00f-e3c5-4a54-beeb-cf89828e9e32" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.211404] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "673be00f-e3c5-4a54-beeb-cf89828e9e32-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.211589] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "673be00f-e3c5-4a54-beeb-cf89828e9e32-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.211757] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock 
"673be00f-e3c5-4a54-beeb-cf89828e9e32-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.214273] env[69992]: INFO nova.compute.manager [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Terminating instance [ 1089.226895] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2897247, 'name': ReconfigVM_Task, 'duration_secs': 0.306922} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.227886] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e/ephemeral_0.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1089.228574] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2d67f789-760e-4cfb-a0a7-0c97f7a59025 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.239629] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for the task: (returnval){ [ 1089.239629] env[69992]: value = "task-2897248" [ 1089.239629] env[69992]: _type = "Task" [ 1089.239629] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.246561] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a8db3c-a569-c46e-484c-933566b50f6e, 'name': SearchDatastore_Task, 'duration_secs': 0.010997} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.247945] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d4855a2-578d-4035-b2d0-d45558aa4c66 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.253172] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2897248, 'name': Rename_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.256791] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for the task: (returnval){ [ 1089.256791] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5244779a-80e7-6c07-3f1d-5f479ece2e11" [ 1089.256791] env[69992]: _type = "Task" [ 1089.256791] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.265730] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5244779a-80e7-6c07-3f1d-5f479ece2e11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.281807] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.171s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.284443] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 45.431s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.284443] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.284604] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69992) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1089.284950] env[69992]: DEBUG oslo_concurrency.lockutils [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.045s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.285218] env[69992]: DEBUG nova.objects.instance [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Lazy-loading 'resources' on Instance uuid b3d62400-e639-4c49-9207-64fd1e684f99 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1089.287163] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df80117a-2f38-4300-a07f-5f6e9a6c627e {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.298596] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e445dde0-1521-479b-9493-0dc5e1e1d749 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.306982] env[69992]: INFO nova.scheduler.client.report [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Deleted allocations for instance 2b1a0943-d59a-441d-a2e6-8149106803b6 [ 1089.321034] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e05cfa08-b16d-4ae0-8d8b-71dd5e02002d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.329866] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e97b372d-45a5-412b-ad4b-d86579fedea1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.364065] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=177799MB free_disk=161GB free_vcpus=48 pci_devices=None {{(pid=69992) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1089.364229] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.546204] env[69992]: DEBUG nova.compute.manager [req-120f7382-a00c-4280-b027-98db0a3b2f9a req-2bf5539b-b611-4730-a80e-46edec1f71ff service nova] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Received event network-vif-plugged-64c65906-7b18-4487-a141-432f0ac29177 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1089.546399] env[69992]: DEBUG oslo_concurrency.lockutils [req-120f7382-a00c-4280-b027-98db0a3b2f9a req-2bf5539b-b611-4730-a80e-46edec1f71ff service nova] Acquiring lock "131096fc-addf-4d9a-9cd7-4abe98aabd1f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.547108] env[69992]: DEBUG oslo_concurrency.lockutils [req-120f7382-a00c-4280-b027-98db0a3b2f9a req-2bf5539b-b611-4730-a80e-46edec1f71ff service nova] Lock "131096fc-addf-4d9a-9cd7-4abe98aabd1f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.547206] env[69992]: DEBUG oslo_concurrency.lockutils [req-120f7382-a00c-4280-b027-98db0a3b2f9a req-2bf5539b-b611-4730-a80e-46edec1f71ff service nova] Lock "131096fc-addf-4d9a-9cd7-4abe98aabd1f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.547354] env[69992]: DEBUG nova.compute.manager [req-120f7382-a00c-4280-b027-98db0a3b2f9a 
req-2bf5539b-b611-4730-a80e-46edec1f71ff service nova] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] No waiting events found dispatching network-vif-plugged-64c65906-7b18-4487-a141-432f0ac29177 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1089.547551] env[69992]: WARNING nova.compute.manager [req-120f7382-a00c-4280-b027-98db0a3b2f9a req-2bf5539b-b611-4730-a80e-46edec1f71ff service nova] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Received unexpected event network-vif-plugged-64c65906-7b18-4487-a141-432f0ac29177 for instance with vm_state building and task_state spawning. [ 1089.628116] env[69992]: DEBUG nova.network.neutron [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Successfully updated port: 64c65906-7b18-4487-a141-432f0ac29177 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1089.720837] env[69992]: DEBUG nova.compute.manager [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1089.721135] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1089.722064] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-371f8e17-f8f5-4260-a6f2-ce905cbe1548 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.730746] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1089.730992] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-63a01015-efd0-4cd1-a187-adc0d1b3f0fb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.741770] env[69992]: DEBUG oslo_vmware.api [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1089.741770] env[69992]: value = "task-2897249" [ 1089.741770] env[69992]: _type = "Task" [ 1089.741770] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.753646] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2897248, 'name': Rename_Task, 'duration_secs': 0.169862} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.756897] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1089.757249] env[69992]: DEBUG oslo_vmware.api [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897249, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.757563] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ab1575d-8f95-4e79-9f0b-71526717207e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.770017] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5244779a-80e7-6c07-3f1d-5f479ece2e11, 'name': SearchDatastore_Task, 'duration_secs': 0.012743} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.771304] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.771580] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 7932a42f-6a62-4c2c-be9a-3cb518fe4183/7932a42f-6a62-4c2c-be9a-3cb518fe4183.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1089.771905] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for the task: (returnval){ [ 1089.771905] env[69992]: value = "task-2897250" [ 1089.771905] env[69992]: _type = "Task" [ 1089.771905] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.772105] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5da406e2-dbd1-482f-89ff-142181a0bc52 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.781607] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2897250, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.782792] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for the task: (returnval){ [ 1089.782792] env[69992]: value = "task-2897251" [ 1089.782792] env[69992]: _type = "Task" [ 1089.782792] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.791334] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897251, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.823946] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9eacb5d1-64b2-4d4e-bb5f-7862046619ea tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "2b1a0943-d59a-441d-a2e6-8149106803b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.836s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.130657] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "refresh_cache-131096fc-addf-4d9a-9cd7-4abe98aabd1f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.130755] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquired lock "refresh_cache-131096fc-addf-4d9a-9cd7-4abe98aabd1f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1090.131146] env[69992]: DEBUG nova.network.neutron [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1090.258246] env[69992]: DEBUG oslo_vmware.api [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897249, 'name': PowerOffVM_Task, 'duration_secs': 0.212143} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.261382] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1090.261605] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1090.262120] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-42a06487-6982-455e-aa4d-2abdfb380862 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.292175] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2897250, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.300975] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897251, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.373067] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c6f613-1d34-476c-8759-ce2dcc24bca5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.381333] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4197d0bc-0ea9-4be0-9d1d-7cdc4c69ab9d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.414591] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-719b91de-f70b-4efc-b416-9dc710142f7b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.423431] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-363c5333-b47a-4285-b645-02ca7e3c4474 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.438491] env[69992]: DEBUG nova.compute.provider_tree [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1090.595099] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1090.595367] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1090.595562] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Deleting the datastore file [datastore2] 673be00f-e3c5-4a54-beeb-cf89828e9e32 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1090.595851] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6cab1ae1-36fc-4055-aa74-d68a5f3f1f97 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.603385] env[69992]: DEBUG oslo_vmware.api [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1090.603385] env[69992]: value = "task-2897253" [ 1090.603385] env[69992]: _type = "Task" [ 1090.603385] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.611090] env[69992]: DEBUG oslo_vmware.api [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897253, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.665219] env[69992]: DEBUG nova.network.neutron [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1090.789383] env[69992]: DEBUG oslo_vmware.api [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2897250, 'name': PowerOnVM_Task, 'duration_secs': 0.530163} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.792755] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1090.792982] env[69992]: INFO nova.compute.manager [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Took 8.47 seconds to spawn the instance on the hypervisor. [ 1090.793212] env[69992]: DEBUG nova.compute.manager [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1090.794098] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9674232b-70e9-43ef-8d7d-e8ff2f729380 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.808031] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897251, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.61351} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.809283] env[69992]: DEBUG nova.network.neutron [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Updating instance_info_cache with network_info: [{"id": "64c65906-7b18-4487-a141-432f0ac29177", "address": "fa:16:3e:2c:75:bd", "network": {"id": "5acf28f9-5cff-441b-b890-520965c956f3", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1147240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6090a2d03daf46e9b687d24fde64fb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64c65906-7b", "ovs_interfaceid": "64c65906-7b18-4487-a141-432f0ac29177", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.810501] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-81592244-48ae-4940-89ad-0cf7e731c248 
tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 7932a42f-6a62-4c2c-be9a-3cb518fe4183/7932a42f-6a62-4c2c-be9a-3cb518fe4183.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1090.810740] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1090.811331] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ab344082-2365-47f0-b861-b3f5e48dc0e7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.818751] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for the task: (returnval){ [ 1090.818751] env[69992]: value = "task-2897254" [ 1090.818751] env[69992]: _type = "Task" [ 1090.818751] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.829864] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897254, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.976876] env[69992]: DEBUG nova.scheduler.client.report [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 94 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1090.977260] env[69992]: DEBUG nova.compute.provider_tree [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 94 to 95 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1090.977471] env[69992]: DEBUG nova.compute.provider_tree [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1091.114255] env[69992]: DEBUG oslo_vmware.api [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897253, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150875} completed successfully. 
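Annotation (not part of the captured trace): the set_inventory_for_provider / update_inventory lines above show the inventory record nova reports to Placement for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, and the provider generation moving from 94 to 95. The sketch below copies those values to show the shape of the data; the `effective_capacity` helper is hypothetical, added only to make the reserved/allocation_ratio arithmetic concrete.

```python
# Inventory payload as logged above (DISK_GB max_unit shown as 160 here).
PROVIDER = '9dc5dd7f-a3af-48a9-a04e-f6c1d333da28'

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1,
                  'max_unit': 16,    'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1,
                  'max_unit': 160,   'step_size': 1, 'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    """Capacity per resource class: (total - reserved) * allocation_ratio."""
    return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
            for rc, v in inv.items()}

# -> {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
print(effective_capacity(inventory))
```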
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.114520] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1091.114706] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1091.114979] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1091.115212] env[69992]: INFO nova.compute.manager [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Took 1.39 seconds to destroy the instance on the hypervisor. [ 1091.115464] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1091.115658] env[69992]: DEBUG nova.compute.manager [-] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1091.115776] env[69992]: DEBUG nova.network.neutron [-] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1091.317028] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Releasing lock "refresh_cache-131096fc-addf-4d9a-9cd7-4abe98aabd1f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1091.317028] env[69992]: DEBUG nova.compute.manager [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Instance network_info: |[{"id": "64c65906-7b18-4487-a141-432f0ac29177", "address": "fa:16:3e:2c:75:bd", "network": {"id": "5acf28f9-5cff-441b-b890-520965c956f3", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1147240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "6090a2d03daf46e9b687d24fde64fb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64c65906-7b", "ovs_interfaceid": "64c65906-7b18-4487-a141-432f0ac29177", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1091.317918] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2c:75:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a91c3a96-63d0-407c-bcde-c3d5b58d9cb2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '64c65906-7b18-4487-a141-432f0ac29177', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1091.327200] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Creating folder: Project (6090a2d03daf46e9b687d24fde64fb72). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1091.328027] env[69992]: INFO nova.compute.manager [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Took 62.64 seconds to build instance. [ 1091.329399] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a8c0233e-a26e-4e9b-897c-b28ecf094314 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.343333] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897254, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.298144} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.343801] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1091.344920] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66552ba4-cc08-44fc-b553-6e3f4e8726ee {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.351215] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Created folder: Project (6090a2d03daf46e9b687d24fde64fb72) in parent group-v581821. [ 1091.351215] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Creating folder: Instances. Parent ref: group-v581996. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1091.353262] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9742e99f-b687-44ac-995d-48e3067fd779 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.376609] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] 7932a42f-6a62-4c2c-be9a-3cb518fe4183/7932a42f-6a62-4c2c-be9a-3cb518fe4183.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1091.377464] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f71129d-a6ac-4805-9786-3dda03dd88e0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.396015] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Created folder: Instances in parent group-v581996. [ 1091.396354] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
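Annotation (not part of the captured trace): the "Waiting for function ... to return." lines above come from oslo.service's looping-call module, which nova uses to wrap calls such as vm_util.create_vm. The sketch below shows that module's basic FixedIntervalLoopingCall primitive rather than the exact wrapper used in this trace; the polled function is a stand-in.

```python
# Minimal looping-call sketch; the real wrapper in the trace polls until the
# wrapped VMware operation finishes, then returns its result.
from oslo_service import loopingcall

def _poll_once():
    # In nova, this check raises LoopingCallDone(result) once the wrapped
    # operation (e.g. a CreateVM_Task) has completed.
    raise loopingcall.LoopingCallDone(retvalue='done')

timer = loopingcall.FixedIntervalLoopingCall(_poll_once)
result = timer.start(interval=0.5).wait()   # blocks until LoopingCallDone
print(result)                               # prints: done
```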
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1091.396999] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1091.397308] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2c4c18c6-2b93-404b-8cea-279da2ba51e6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.415064] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for the task: (returnval){ [ 1091.415064] env[69992]: value = "task-2897257" [ 1091.415064] env[69992]: _type = "Task" [ 1091.415064] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.423535] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1091.423535] env[69992]: value = "task-2897258" [ 1091.423535] env[69992]: _type = "Task" [ 1091.423535] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.426849] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897257, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.437158] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897258, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.486032] env[69992]: DEBUG oslo_concurrency.lockutils [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.200s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.487897] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.867s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.490860] env[69992]: INFO nova.compute.claims [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1091.527727] env[69992]: INFO nova.scheduler.client.report [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Deleted allocations for instance b3d62400-e639-4c49-9207-64fd1e684f99 [ 1091.781470] env[69992]: DEBUG nova.compute.manager [req-a4fa2e14-8abe-4de2-85f1-4f89642e5c37 req-b092c68a-92d9-4077-a942-19628dd00a5b service nova] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Received event network-changed-64c65906-7b18-4487-a141-432f0ac29177 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1091.781758] env[69992]: DEBUG nova.compute.manager [req-a4fa2e14-8abe-4de2-85f1-4f89642e5c37 req-b092c68a-92d9-4077-a942-19628dd00a5b service nova] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Refreshing instance network info cache due to event network-changed-64c65906-7b18-4487-a141-432f0ac29177. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1091.781906] env[69992]: DEBUG oslo_concurrency.lockutils [req-a4fa2e14-8abe-4de2-85f1-4f89642e5c37 req-b092c68a-92d9-4077-a942-19628dd00a5b service nova] Acquiring lock "refresh_cache-131096fc-addf-4d9a-9cd7-4abe98aabd1f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.784259] env[69992]: DEBUG oslo_concurrency.lockutils [req-a4fa2e14-8abe-4de2-85f1-4f89642e5c37 req-b092c68a-92d9-4077-a942-19628dd00a5b service nova] Acquired lock "refresh_cache-131096fc-addf-4d9a-9cd7-4abe98aabd1f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1091.784259] env[69992]: DEBUG nova.network.neutron [req-a4fa2e14-8abe-4de2-85f1-4f89642e5c37 req-b092c68a-92d9-4077-a942-19628dd00a5b service nova] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Refreshing network info cache for port 64c65906-7b18-4487-a141-432f0ac29177 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1091.834313] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e57dcbc1-d2b1-4368-99f0-2e2899a62fb6 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Lock "4cd9fb91-44f1-4304-a2bf-c8b294b19e0e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.918s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.928905] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897257, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.936709] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897258, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.998440] env[69992]: DEBUG nova.network.neutron [-] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.035823] env[69992]: DEBUG oslo_concurrency.lockutils [None req-37b7c55d-239a-4439-be1b-a01522395c57 tempest-ImagesNegativeTestJSON-2135729399 tempest-ImagesNegativeTestJSON-2135729399-project-member] Lock "b3d62400-e639-4c49-9207-64fd1e684f99" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.395s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.205862] env[69992]: DEBUG nova.compute.manager [req-e099e20e-f494-4f5a-b3c1-774151cfaa79 req-1f3e9b07-a9b7-4aa8-87c2-f34e0e46788b service nova] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Received event network-changed-caeee1e6-2d7b-48fe-afa7-7b1525a95c86 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1092.206088] env[69992]: DEBUG nova.compute.manager [req-e099e20e-f494-4f5a-b3c1-774151cfaa79 req-1f3e9b07-a9b7-4aa8-87c2-f34e0e46788b service nova] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Refreshing instance network info cache due to event network-changed-caeee1e6-2d7b-48fe-afa7-7b1525a95c86. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1092.206329] env[69992]: DEBUG oslo_concurrency.lockutils [req-e099e20e-f494-4f5a-b3c1-774151cfaa79 req-1f3e9b07-a9b7-4aa8-87c2-f34e0e46788b service nova] Acquiring lock "refresh_cache-4cd9fb91-44f1-4304-a2bf-c8b294b19e0e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.206476] env[69992]: DEBUG oslo_concurrency.lockutils [req-e099e20e-f494-4f5a-b3c1-774151cfaa79 req-1f3e9b07-a9b7-4aa8-87c2-f34e0e46788b service nova] Acquired lock "refresh_cache-4cd9fb91-44f1-4304-a2bf-c8b294b19e0e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1092.206640] env[69992]: DEBUG nova.network.neutron [req-e099e20e-f494-4f5a-b3c1-774151cfaa79 req-1f3e9b07-a9b7-4aa8-87c2-f34e0e46788b service nova] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Refreshing network info cache for port caeee1e6-2d7b-48fe-afa7-7b1525a95c86 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1092.338034] env[69992]: DEBUG nova.compute.manager [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1092.430910] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897257, 'name': ReconfigVM_Task, 'duration_secs': 0.791941} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.434179] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Reconfigured VM instance instance-00000039 to attach disk [datastore2] 7932a42f-6a62-4c2c-be9a-3cb518fe4183/7932a42f-6a62-4c2c-be9a-3cb518fe4183.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1092.434781] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a485ca5-5158-4ec6-b3fc-99c0ca37f1ca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.442956] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897258, 'name': CreateVM_Task, 'duration_secs': 0.742084} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.444073] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1092.444393] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for the task: (returnval){ [ 1092.444393] env[69992]: value = "task-2897259" [ 1092.444393] env[69992]: _type = "Task" [ 1092.444393] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.444999] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.445171] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1092.445474] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1092.445760] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6b87b80-f6da-4e6b-a3c8-36d27ff3a3af {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.458037] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 
1092.458037] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5283dcde-3538-a2ef-2f05-5425bc8ea2e5" [ 1092.458037] env[69992]: _type = "Task" [ 1092.458037] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.467875] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5283dcde-3538-a2ef-2f05-5425bc8ea2e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.504531] env[69992]: INFO nova.compute.manager [-] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Took 1.39 seconds to deallocate network for instance. [ 1092.517035] env[69992]: DEBUG nova.network.neutron [req-a4fa2e14-8abe-4de2-85f1-4f89642e5c37 req-b092c68a-92d9-4077-a942-19628dd00a5b service nova] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Updated VIF entry in instance network info cache for port 64c65906-7b18-4487-a141-432f0ac29177. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1092.517150] env[69992]: DEBUG nova.network.neutron [req-a4fa2e14-8abe-4de2-85f1-4f89642e5c37 req-b092c68a-92d9-4077-a942-19628dd00a5b service nova] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Updating instance_info_cache with network_info: [{"id": "64c65906-7b18-4487-a141-432f0ac29177", "address": "fa:16:3e:2c:75:bd", "network": {"id": "5acf28f9-5cff-441b-b890-520965c956f3", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1147240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6090a2d03daf46e9b687d24fde64fb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64c65906-7b", "ovs_interfaceid": "64c65906-7b18-4487-a141-432f0ac29177", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.861539] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.958412] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897259, 'name': Rename_Task, 'duration_secs': 0.446913} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.962466] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1092.964089] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-314c7180-34bb-4230-91d4-c1c8543220a0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.981881] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5283dcde-3538-a2ef-2f05-5425bc8ea2e5, 'name': SearchDatastore_Task, 'duration_secs': 0.012388} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.982567] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1092.982842] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1092.983111] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.987170] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1092.987170] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1092.987170] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for the task: (returnval){ [ 1092.987170] env[69992]: value = "task-2897260" [ 1092.987170] env[69992]: _type = "Task" [ 
1092.987170] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.989465] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1de9d82a-6a36-491a-861e-58d93b0fe497 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.003502] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897260, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.007916] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1093.011423] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1093.011423] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9669495c-1619-4217-a418-5deaf60bfe4f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.013687] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1093.015778] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1093.015778] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d36309-7d16-4551-8352-cb630f4ae0ea" [ 1093.015778] env[69992]: _type = "Task" [ 1093.015778] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.019513] env[69992]: DEBUG oslo_concurrency.lockutils [req-a4fa2e14-8abe-4de2-85f1-4f89642e5c37 req-b092c68a-92d9-4077-a942-19628dd00a5b service nova] Releasing lock "refresh_cache-131096fc-addf-4d9a-9cd7-4abe98aabd1f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1093.019771] env[69992]: DEBUG nova.compute.manager [req-a4fa2e14-8abe-4de2-85f1-4f89642e5c37 req-b092c68a-92d9-4077-a942-19628dd00a5b service nova] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Received event network-vif-deleted-3479d475-b805-49db-a031-c31a6724c10d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1093.020017] env[69992]: INFO nova.compute.manager [req-a4fa2e14-8abe-4de2-85f1-4f89642e5c37 req-b092c68a-92d9-4077-a942-19628dd00a5b service nova] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Neutron deleted interface 3479d475-b805-49db-a031-c31a6724c10d; detaching it from the instance and deleting it from the info cache [ 1093.020277] env[69992]: DEBUG nova.network.neutron [req-a4fa2e14-8abe-4de2-85f1-4f89642e5c37 req-b092c68a-92d9-4077-a942-19628dd00a5b service nova] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.030893] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d36309-7d16-4551-8352-cb630f4ae0ea, 'name': SearchDatastore_Task, 'duration_secs': 0.011846} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.031753] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09bb5c65-3577-402b-a244-ab0a283f0082 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.038061] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1093.038061] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5207cb04-e3b9-99df-838d-16551d288946" [ 1093.038061] env[69992]: _type = "Task" [ 1093.038061] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.047346] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5207cb04-e3b9-99df-838d-16551d288946, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.066038] env[69992]: DEBUG nova.network.neutron [req-e099e20e-f494-4f5a-b3c1-774151cfaa79 req-1f3e9b07-a9b7-4aa8-87c2-f34e0e46788b service nova] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Updated VIF entry in instance network info cache for port caeee1e6-2d7b-48fe-afa7-7b1525a95c86. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1093.066427] env[69992]: DEBUG nova.network.neutron [req-e099e20e-f494-4f5a-b3c1-774151cfaa79 req-1f3e9b07-a9b7-4aa8-87c2-f34e0e46788b service nova] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Updating instance_info_cache with network_info: [{"id": "caeee1e6-2d7b-48fe-afa7-7b1525a95c86", "address": "fa:16:3e:e7:8e:9a", "network": {"id": "b6ad7b53-72db-475c-a28d-2b0c8da19818", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1429555593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea0f9171bc5c4034b8dbe9100bd6e007", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcaeee1e6-2d", "ovs_interfaceid": "caeee1e6-2d7b-48fe-afa7-7b1525a95c86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.080017] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5252ee49-b411-455a-a9b2-aba5ea687684 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.090088] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78708bce-1593-486c-8338-8ab2482d0aed {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.125500] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec5e453d-5fbd-42a3-8ddb-e736964ebb71 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.134804] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ff6c57-f62f-412a-bd3d-c2af878c7dd2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.150024] env[69992]: DEBUG nova.compute.provider_tree [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1093.502014] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897260, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.526883] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e17326c8-dc79-4a7e-ab38-1f44bc7e4ba1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.537093] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cadbb9ad-de08-4f2f-92e1-977d2b53b853 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.558016] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5207cb04-e3b9-99df-838d-16551d288946, 'name': SearchDatastore_Task, 'duration_secs': 0.011134} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.558300] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1093.558562] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 131096fc-addf-4d9a-9cd7-4abe98aabd1f/131096fc-addf-4d9a-9cd7-4abe98aabd1f.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1093.558824] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f8d9b5c2-c811-4564-914f-2428c0891f93 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.580941] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1093.580941] env[69992]: value = "task-2897261" [ 1093.580941] env[69992]: _type = "Task" [ 1093.580941] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.581637] env[69992]: DEBUG oslo_concurrency.lockutils [req-e099e20e-f494-4f5a-b3c1-774151cfaa79 req-1f3e9b07-a9b7-4aa8-87c2-f34e0e46788b service nova] Releasing lock "refresh_cache-4cd9fb91-44f1-4304-a2bf-c8b294b19e0e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1093.581919] env[69992]: DEBUG nova.compute.manager [req-a4fa2e14-8abe-4de2-85f1-4f89642e5c37 req-b092c68a-92d9-4077-a942-19628dd00a5b service nova] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Detach interface failed, port_id=3479d475-b805-49db-a031-c31a6724c10d, reason: Instance 673be00f-e3c5-4a54-beeb-cf89828e9e32 could not be found. 
{{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1093.590890] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897261, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.653259] env[69992]: DEBUG nova.scheduler.client.report [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1094.004538] env[69992]: DEBUG oslo_vmware.api [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897260, 'name': PowerOnVM_Task, 'duration_secs': 0.812398} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.004978] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1094.005153] env[69992]: INFO nova.compute.manager [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Took 6.46 seconds to spawn the instance on the hypervisor. [ 1094.005395] env[69992]: DEBUG nova.compute.manager [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1094.006302] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae6d5713-0f06-4adf-bb45-f87007446536 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.174371] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897261, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50448} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.174371] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 131096fc-addf-4d9a-9cd7-4abe98aabd1f/131096fc-addf-4d9a-9cd7-4abe98aabd1f.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1094.174371] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1094.174371] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fe5632db-05a2-4a58-a5ec-dcf6e86c073d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.174371] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1094.174371] env[69992]: value = "task-2897262" [ 1094.174371] env[69992]: _type = "Task" [ 1094.174371] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.174371] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897262, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.174371] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.671s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.174371] env[69992]: DEBUG nova.compute.manager [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1094.174371] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.012s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.174371] env[69992]: INFO nova.compute.claims [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1094.526255] env[69992]: INFO nova.compute.manager [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Took 53.10 seconds to build instance. [ 1094.617151] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897262, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082155} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.617429] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1094.618241] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8029fe9e-26ae-48d8-b592-949121fa905d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.642359] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] 131096fc-addf-4d9a-9cd7-4abe98aabd1f/131096fc-addf-4d9a-9cd7-4abe98aabd1f.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1094.642635] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b96f63d-1588-473e-9130-2081bbb17f67 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.664589] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1094.664589] env[69992]: value = "task-2897263" [ 1094.664589] env[69992]: _type = "Task" [ 1094.664589] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.671143] env[69992]: DEBUG nova.compute.utils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1094.672736] env[69992]: DEBUG nova.compute.manager [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1094.672977] env[69992]: DEBUG nova.network.neutron [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1094.684430] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897263, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.722206] env[69992]: DEBUG nova.policy [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94516a830322454c90c08043118e547a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6090a2d03daf46e9b687d24fde64fb72', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1095.005195] env[69992]: DEBUG nova.network.neutron [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Successfully created port: ae2c507f-1ebf-4321-a3b8-bd98e024886f {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1095.029563] env[69992]: DEBUG oslo_concurrency.lockutils [None req-81592244-48ae-4940-89ad-0cf7e731c248 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Lock "7932a42f-6a62-4c2c-be9a-3cb518fe4183" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.131s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.180446] env[69992]: DEBUG nova.compute.manager [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1095.183111] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897263, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.641900] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6277b014-d40b-4892-9f3d-b334986ccc3a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.649946] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebde8158-168a-4145-9b08-70a3c54e6428 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.685727] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f2a4a9-09af-4584-847e-6d90792c1418 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.697694] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897263, 'name': ReconfigVM_Task, 'duration_secs': 0.846271} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.699737] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Reconfigured VM instance instance-0000003a to attach disk [datastore2] 131096fc-addf-4d9a-9cd7-4abe98aabd1f/131096fc-addf-4d9a-9cd7-4abe98aabd1f.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1095.700368] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-03256271-8783-42c0-acba-caa045b7d1b4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.702765] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cff75204-464a-4235-802c-e57e91098ff7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.717871] env[69992]: DEBUG nova.compute.provider_tree [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1095.720372] env[69992]: DEBUG oslo_vmware.api [None 
req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1095.720372] env[69992]: value = "task-2897264" [ 1095.720372] env[69992]: _type = "Task" [ 1095.720372] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.730240] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897264, 'name': Rename_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.908877] env[69992]: INFO nova.compute.manager [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Rebuilding instance [ 1095.945842] env[69992]: DEBUG nova.compute.manager [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1095.946716] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95687802-01e0-4089-96a2-1b52fdbf5cf8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.194109] env[69992]: DEBUG nova.compute.manager [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1096.221102] env[69992]: DEBUG nova.virt.hardware [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1096.221357] env[69992]: DEBUG nova.virt.hardware [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1096.221522] env[69992]: DEBUG nova.virt.hardware [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1096.221718] env[69992]: DEBUG nova.virt.hardware [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1096.221892] env[69992]: DEBUG nova.virt.hardware [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1096.222065] env[69992]: DEBUG nova.virt.hardware [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1096.222283] env[69992]: DEBUG nova.virt.hardware [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1096.222444] env[69992]: DEBUG nova.virt.hardware [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1096.222614] env[69992]: DEBUG nova.virt.hardware [None 
req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1096.222787] env[69992]: DEBUG nova.virt.hardware [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1096.222959] env[69992]: DEBUG nova.virt.hardware [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1096.226541] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc30c9f-91c6-4bc7-806e-f5e647a2fad7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.246199] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897264, 'name': Rename_Task, 'duration_secs': 0.405447} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.249013] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1096.250410] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7d9233e-070e-4dd4-8163-2289c3b726f1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.255308] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f193368-e817-4061-b112-e8c4769af3ab {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.274484] env[69992]: DEBUG nova.scheduler.client.report [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 95 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1096.274742] env[69992]: DEBUG nova.compute.provider_tree [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] 
Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 95 to 96 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1096.274923] env[69992]: DEBUG nova.compute.provider_tree [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1096.278353] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1096.278353] env[69992]: value = "task-2897265" [ 1096.278353] env[69992]: _type = "Task" [ 1096.278353] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.288391] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897265, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.460892] env[69992]: DEBUG nova.compute.manager [req-c17500d5-2943-4d96-b70d-3b3757900441 req-b720c87b-c25a-4e4f-ba2c-c0c92eeee437 service nova] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Received event network-vif-plugged-ae2c507f-1ebf-4321-a3b8-bd98e024886f {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1096.461180] env[69992]: DEBUG oslo_concurrency.lockutils [req-c17500d5-2943-4d96-b70d-3b3757900441 req-b720c87b-c25a-4e4f-ba2c-c0c92eeee437 service nova] Acquiring lock "086ac14d-74bb-4bb6-90b3-3e345b2894a9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.461371] env[69992]: DEBUG oslo_concurrency.lockutils [req-c17500d5-2943-4d96-b70d-3b3757900441 req-b720c87b-c25a-4e4f-ba2c-c0c92eeee437 service nova] Lock "086ac14d-74bb-4bb6-90b3-3e345b2894a9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.461599] env[69992]: DEBUG oslo_concurrency.lockutils [req-c17500d5-2943-4d96-b70d-3b3757900441 req-b720c87b-c25a-4e4f-ba2c-c0c92eeee437 service nova] Lock "086ac14d-74bb-4bb6-90b3-3e345b2894a9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.461701] env[69992]: DEBUG nova.compute.manager [req-c17500d5-2943-4d96-b70d-3b3757900441 req-b720c87b-c25a-4e4f-ba2c-c0c92eeee437 service nova] [instance: 
086ac14d-74bb-4bb6-90b3-3e345b2894a9] No waiting events found dispatching network-vif-plugged-ae2c507f-1ebf-4321-a3b8-bd98e024886f {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1096.461888] env[69992]: WARNING nova.compute.manager [req-c17500d5-2943-4d96-b70d-3b3757900441 req-b720c87b-c25a-4e4f-ba2c-c0c92eeee437 service nova] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Received unexpected event network-vif-plugged-ae2c507f-1ebf-4321-a3b8-bd98e024886f for instance with vm_state building and task_state spawning. [ 1096.556643] env[69992]: DEBUG nova.network.neutron [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Successfully updated port: ae2c507f-1ebf-4321-a3b8-bd98e024886f {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1096.780742] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.618s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.781327] env[69992]: DEBUG nova.compute.manager [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1096.784866] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.575s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.784866] env[69992]: DEBUG nova.objects.instance [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lazy-loading 'resources' on Instance uuid 98cd0eb8-d17a-4a9b-a172-1ba1207168d0 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1096.795417] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897265, 'name': PowerOnVM_Task, 'duration_secs': 0.522584} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.796159] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1096.796275] env[69992]: INFO nova.compute.manager [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Took 7.66 seconds to spawn the instance on the hypervisor. [ 1096.796429] env[69992]: DEBUG nova.compute.manager [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1096.797235] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e10c3699-8745-414f-832f-772c5856c97b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.963956] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1096.964109] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e626e400-5e2a-42b0-a09d-f9715af7b191 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.972663] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for the task: (returnval){ [ 1096.972663] env[69992]: value = "task-2897266" [ 1096.972663] env[69992]: _type = "Task" [ 1096.972663] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.983508] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897266, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.059504] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "refresh_cache-086ac14d-74bb-4bb6-90b3-3e345b2894a9" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.059672] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquired lock "refresh_cache-086ac14d-74bb-4bb6-90b3-3e345b2894a9" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1097.059904] env[69992]: DEBUG nova.network.neutron [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1097.293933] env[69992]: DEBUG nova.compute.utils [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1097.295443] env[69992]: DEBUG nova.compute.manager [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1097.295612] env[69992]: DEBUG nova.network.neutron [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1097.318179] env[69992]: INFO nova.compute.manager [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Took 55.55 seconds to build instance. 
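The entries around this point repeat two driver-side patterns: named locks taken through oslo.concurrency (for example "compute_resources" and "refresh_cache-<uuid>") and vCenter tasks (PowerOnVM_Task, PowerOffVM_Task, CreateVM_Task, CopyVirtualDisk_Task) that are polled until the "completed successfully" line is logged. The sketch below is only an illustrative approximation of how those two pieces are typically wired together with the public oslo.concurrency and oslo.vmware APIs; it is not code from this deployment, and the vCenter host, credentials, poll interval, and the vm_ref argument are assumed placeholders.

# Illustrative sketch, not taken from this log or from Nova itself.
from oslo_concurrency import lockutils
from oslo_vmware import api as vmware_api

# Placeholder connection values; constructing the session logs in to
# vCenter immediately (create_session defaults to True).
session = vmware_api.VMwareAPISession(
    'vc1.example.test',            # placeholder vCenter host
    'placeholder-user',            # placeholder username
    'placeholder-password',        # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5,        # how often task progress is polled
)

def power_on(vm_ref):
    # A named lock comparable to the 'Lock "compute_resources" acquired by ...'
    # entries above; serializes this work against other holders of the name.
    with lockutils.lock('compute_resources'):
        # Start the vSphere task, then block until it finishes.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        session.wait_for_task(task)

wait_for_task() is the loop behind the repeated "progress is N%" and "completed successfully" _poll_task lines seen throughout this section: it re-reads the task's info until it reaches a success or error state, raising on error.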
[ 1097.362298] env[69992]: DEBUG nova.policy [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '461a16451840440a86fa85e586077d52', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bb75ccd37b3415a8837de260e0886c3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1097.483842] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897266, 'name': PowerOffVM_Task, 'duration_secs': 0.259435} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.484122] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1097.484359] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1097.485124] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f41d9409-e609-422f-b714-f9963ffc0581 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.494317] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1097.494563] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53291e7c-02b9-4f7b-b2f7-dd57fadbd015 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.524111] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1097.524333] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1097.524510] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Deleting the datastore file [datastore2] 7932a42f-6a62-4c2c-be9a-3cb518fe4183 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1097.524796] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-41e84f5e-aa3c-46d2-9904-b38185349248 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.532370] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for the task: (returnval){ [ 1097.532370] env[69992]: value = "task-2897268" [ 1097.532370] env[69992]: _type = "Task" [ 1097.532370] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.541211] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897268, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.598016] env[69992]: DEBUG nova.network.neutron [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1097.638567] env[69992]: DEBUG nova.network.neutron [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Successfully created port: f0ba32e1-39c0-4939-aafa-23959e961f93 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1097.798893] env[69992]: DEBUG nova.compute.manager [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1097.821064] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "131096fc-addf-4d9a-9cd7-4abe98aabd1f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.351s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.824192] env[69992]: DEBUG nova.network.neutron [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Updating instance_info_cache with network_info: [{"id": "ae2c507f-1ebf-4321-a3b8-bd98e024886f", "address": "fa:16:3e:87:d0:08", "network": {"id": "5acf28f9-5cff-441b-b890-520965c956f3", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1147240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6090a2d03daf46e9b687d24fde64fb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae2c507f-1e", "ovs_interfaceid": "ae2c507f-1ebf-4321-a3b8-bd98e024886f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.934197] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f3c8150-6eb8-4f70-adf6-9ae3409d3fe3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.942140] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b6180cc-b039-4ab7-9cf2-48a759aa639e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.975052] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4483a2-c17d-4bb3-964f-258049eda91e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.983487] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd58b103-2fb7-41eb-8158-439931848d43 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.999025] env[69992]: DEBUG nova.compute.provider_tree [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1098.043140] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897268, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101566} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.043389] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1098.043577] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1098.043754] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1098.327375] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Releasing lock "refresh_cache-086ac14d-74bb-4bb6-90b3-3e345b2894a9" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1098.327723] env[69992]: DEBUG nova.compute.manager [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Instance network_info: |[{"id": "ae2c507f-1ebf-4321-a3b8-bd98e024886f", "address": "fa:16:3e:87:d0:08", "network": {"id": "5acf28f9-5cff-441b-b890-520965c956f3", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1147240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6090a2d03daf46e9b687d24fde64fb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae2c507f-1e", "ovs_interfaceid": "ae2c507f-1ebf-4321-a3b8-bd98e024886f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1098.328173] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 
tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:d0:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a91c3a96-63d0-407c-bcde-c3d5b58d9cb2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ae2c507f-1ebf-4321-a3b8-bd98e024886f', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1098.336079] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1098.336319] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1098.337368] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-59c419ba-da97-47b8-8866-3c7de99ecc13 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.360887] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1098.360887] env[69992]: value = "task-2897269" [ 1098.360887] env[69992]: _type = "Task" [ 1098.360887] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.369212] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897269, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.486482] env[69992]: DEBUG nova.compute.manager [req-b0214b4c-6755-4227-b213-2269e5c2742a req-2766e82b-a0fe-4ec9-8328-a5d22651d191 service nova] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Received event network-changed-ae2c507f-1ebf-4321-a3b8-bd98e024886f {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1098.486752] env[69992]: DEBUG nova.compute.manager [req-b0214b4c-6755-4227-b213-2269e5c2742a req-2766e82b-a0fe-4ec9-8328-a5d22651d191 service nova] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Refreshing instance network info cache due to event network-changed-ae2c507f-1ebf-4321-a3b8-bd98e024886f. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1098.487056] env[69992]: DEBUG oslo_concurrency.lockutils [req-b0214b4c-6755-4227-b213-2269e5c2742a req-2766e82b-a0fe-4ec9-8328-a5d22651d191 service nova] Acquiring lock "refresh_cache-086ac14d-74bb-4bb6-90b3-3e345b2894a9" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.487291] env[69992]: DEBUG oslo_concurrency.lockutils [req-b0214b4c-6755-4227-b213-2269e5c2742a req-2766e82b-a0fe-4ec9-8328-a5d22651d191 service nova] Acquired lock "refresh_cache-086ac14d-74bb-4bb6-90b3-3e345b2894a9" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.487526] env[69992]: DEBUG nova.network.neutron [req-b0214b4c-6755-4227-b213-2269e5c2742a req-2766e82b-a0fe-4ec9-8328-a5d22651d191 service nova] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Refreshing network info cache for port ae2c507f-1ebf-4321-a3b8-bd98e024886f {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1098.502888] env[69992]: DEBUG nova.scheduler.client.report [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1098.808891] env[69992]: DEBUG nova.compute.manager [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1098.834454] env[69992]: DEBUG nova.virt.hardware [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1098.834697] env[69992]: DEBUG nova.virt.hardware [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1098.834883] env[69992]: DEBUG nova.virt.hardware [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1098.835089] env[69992]: DEBUG nova.virt.hardware [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1098.835241] env[69992]: DEBUG nova.virt.hardware [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1098.835389] env[69992]: DEBUG nova.virt.hardware [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1098.835594] env[69992]: DEBUG nova.virt.hardware [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1098.835858] env[69992]: DEBUG nova.virt.hardware [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1098.836063] env[69992]: DEBUG nova.virt.hardware [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1098.836229] env[69992]: DEBUG nova.virt.hardware [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1098.836919] env[69992]: DEBUG nova.virt.hardware [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1098.837261] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50121b85-a091-46f0-9b62-85aef54f2c8c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.845740] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f1c92d-fb79-4441-b01a-502dcba1556f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.869547] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897269, 'name': CreateVM_Task, 'duration_secs': 0.336012} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.869710] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1098.870377] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.870548] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.870881] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1098.871142] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e046f6c-02b7-41b2-a12a-a929e2fa81bf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.876262] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1098.876262] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]526cd4d7-aeb4-d6fb-98df-d94c3675d96b" [ 1098.876262] env[69992]: _type = "Task" [ 1098.876262] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.884845] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526cd4d7-aeb4-d6fb-98df-d94c3675d96b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.008259] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.224s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.010540] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 43.369s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.031059] env[69992]: INFO nova.scheduler.client.report [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Deleted allocations for instance 98cd0eb8-d17a-4a9b-a172-1ba1207168d0 [ 1099.081155] env[69992]: DEBUG nova.virt.hardware [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1099.081388] env[69992]: DEBUG nova.virt.hardware [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1099.081547] env[69992]: DEBUG nova.virt.hardware [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1099.081836] env[69992]: DEBUG nova.virt.hardware [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1099.081992] env[69992]: DEBUG nova.virt.hardware [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1099.082156] env[69992]: DEBUG 
nova.virt.hardware [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1099.082368] env[69992]: DEBUG nova.virt.hardware [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1099.082520] env[69992]: DEBUG nova.virt.hardware [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1099.083061] env[69992]: DEBUG nova.virt.hardware [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1099.083134] env[69992]: DEBUG nova.virt.hardware [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1099.083466] env[69992]: DEBUG nova.virt.hardware [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1099.084845] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01382cf-0a83-43b9-b5a1-6b0525b6472f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.097302] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e1c540-16f1-47ee-a1fe-0a675e7bbf1e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.114906] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Instance VIF info [] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1099.120823] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1099.121691] env[69992]: DEBUG nova.network.neutron [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Successfully updated port: f0ba32e1-39c0-4939-aafa-23959e961f93 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1099.122811] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1099.125327] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc420689-bd83-498f-9a90-7f29f2a6d2b5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.139662] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquiring lock "refresh_cache-32bdb15d-6a4d-4445-9b82-d18b0f6743b6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.139809] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquired lock "refresh_cache-32bdb15d-6a4d-4445-9b82-d18b0f6743b6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1099.139967] env[69992]: DEBUG nova.network.neutron [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1099.150328] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1099.150328] env[69992]: value = "task-2897270" [ 1099.150328] env[69992]: _type = "Task" [ 1099.150328] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.159926] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897270, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.284337] env[69992]: DEBUG nova.network.neutron [req-b0214b4c-6755-4227-b213-2269e5c2742a req-2766e82b-a0fe-4ec9-8328-a5d22651d191 service nova] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Updated VIF entry in instance network info cache for port ae2c507f-1ebf-4321-a3b8-bd98e024886f. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1099.284707] env[69992]: DEBUG nova.network.neutron [req-b0214b4c-6755-4227-b213-2269e5c2742a req-2766e82b-a0fe-4ec9-8328-a5d22651d191 service nova] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Updating instance_info_cache with network_info: [{"id": "ae2c507f-1ebf-4321-a3b8-bd98e024886f", "address": "fa:16:3e:87:d0:08", "network": {"id": "5acf28f9-5cff-441b-b890-520965c956f3", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1147240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6090a2d03daf46e9b687d24fde64fb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae2c507f-1e", "ovs_interfaceid": "ae2c507f-1ebf-4321-a3b8-bd98e024886f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.390014] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526cd4d7-aeb4-d6fb-98df-d94c3675d96b, 'name': SearchDatastore_Task, 'duration_secs': 0.010467} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.390364] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.390674] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1099.390926] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.391113] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1099.391312] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1099.391597] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0a86dca-9c38-4218-a69c-1990a82e4f7c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.401602] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1099.401602] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1099.402637] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26cd4516-aa47-4b35-939e-814e7b1c4c12 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.408526] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1099.408526] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52ca05ea-23ae-ba36-f3e8-d903609802a0" [ 1099.408526] env[69992]: _type = "Task" [ 1099.408526] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.417159] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ca05ea-23ae-ba36-f3e8-d903609802a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.515955] env[69992]: INFO nova.compute.claims [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1099.541730] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9af01d72-b299-4e76-a5e3-38a5ce543acf tempest-ListServerFiltersTestJSON-499730754 tempest-ListServerFiltersTestJSON-499730754-project-member] Lock "98cd0eb8-d17a-4a9b-a172-1ba1207168d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.128s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.661324] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897270, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.690556] env[69992]: DEBUG nova.network.neutron [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1099.789027] env[69992]: DEBUG oslo_concurrency.lockutils [req-b0214b4c-6755-4227-b213-2269e5c2742a req-2766e82b-a0fe-4ec9-8328-a5d22651d191 service nova] Releasing lock "refresh_cache-086ac14d-74bb-4bb6-90b3-3e345b2894a9" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.895711] env[69992]: DEBUG nova.network.neutron [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Updating instance_info_cache with network_info: [{"id": "f0ba32e1-39c0-4939-aafa-23959e961f93", "address": "fa:16:3e:c4:03:44", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0ba32e1-39", "ovs_interfaceid": "f0ba32e1-39c0-4939-aafa-23959e961f93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.920367] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ca05ea-23ae-ba36-f3e8-d903609802a0, 'name': SearchDatastore_Task, 'duration_secs': 0.009814} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.921213] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2644d4af-ec8e-427d-940f-a5e6c80a1121 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.928439] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1099.928439] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52bd52cc-42c5-5593-6493-6a9cd7e655ee" [ 1099.928439] env[69992]: _type = "Task" [ 1099.928439] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.937236] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52bd52cc-42c5-5593-6493-6a9cd7e655ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.023949] env[69992]: INFO nova.compute.resource_tracker [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Updating resource usage from migration 53d4b0cb-ec86-417c-87f6-76638a7b3c0b [ 1100.161620] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897270, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.398307] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Releasing lock "refresh_cache-32bdb15d-6a4d-4445-9b82-d18b0f6743b6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1100.398608] env[69992]: DEBUG nova.compute.manager [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Instance network_info: |[{"id": "f0ba32e1-39c0-4939-aafa-23959e961f93", "address": "fa:16:3e:c4:03:44", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0ba32e1-39", "ovs_interfaceid": "f0ba32e1-39c0-4939-aafa-23959e961f93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1100.399235] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:03:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '07e9bef1-2b0e-4e4d-997f-de71bb0e213a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f0ba32e1-39c0-4939-aafa-23959e961f93', 
'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1100.410129] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1100.410129] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1100.410129] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4fab4848-afd9-4c4d-84de-cc0aa7161df3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.434936] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1100.434936] env[69992]: value = "task-2897271" [ 1100.434936] env[69992]: _type = "Task" [ 1100.434936] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.441879] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52bd52cc-42c5-5593-6493-6a9cd7e655ee, 'name': SearchDatastore_Task, 'duration_secs': 0.010981} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.444640] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1100.444922] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 086ac14d-74bb-4bb6-90b3-3e345b2894a9/086ac14d-74bb-4bb6-90b3-3e345b2894a9.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1100.445428] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b1cb3acc-10d7-490e-99e0-082d095f0470 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.451822] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897271, 'name': CreateVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.457137] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1100.457137] env[69992]: value = "task-2897272" [ 1100.457137] env[69992]: _type = "Task" [ 1100.457137] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.469748] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897272, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.500093] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a65e16-f3cd-421a-823b-fbc188b86dcb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.507981] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1878e6d2-6874-4f38-ba1c-3d8ea59530e5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.540503] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fdfecd2-b10e-44ba-9b12-157d9e8effa9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.548044] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f290c2b0-2fd8-4d29-8574-5b7e7f973cfa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.563198] env[69992]: DEBUG nova.compute.provider_tree [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.630759] env[69992]: DEBUG nova.compute.manager [req-b359a5b3-c743-4635-9405-7469f9f43e0b req-95a24b9c-a846-4457-a0b0-908089fb8eef service nova] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Received event network-vif-plugged-f0ba32e1-39c0-4939-aafa-23959e961f93 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1100.631244] env[69992]: DEBUG oslo_concurrency.lockutils [req-b359a5b3-c743-4635-9405-7469f9f43e0b req-95a24b9c-a846-4457-a0b0-908089fb8eef service nova] Acquiring lock "32bdb15d-6a4d-4445-9b82-d18b0f6743b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1100.631535] env[69992]: DEBUG oslo_concurrency.lockutils [req-b359a5b3-c743-4635-9405-7469f9f43e0b req-95a24b9c-a846-4457-a0b0-908089fb8eef service nova] Lock "32bdb15d-6a4d-4445-9b82-d18b0f6743b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1100.631757] env[69992]: DEBUG oslo_concurrency.lockutils [req-b359a5b3-c743-4635-9405-7469f9f43e0b req-95a24b9c-a846-4457-a0b0-908089fb8eef service nova] Lock "32bdb15d-6a4d-4445-9b82-d18b0f6743b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.631964] env[69992]: DEBUG nova.compute.manager [req-b359a5b3-c743-4635-9405-7469f9f43e0b req-95a24b9c-a846-4457-a0b0-908089fb8eef service nova] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] No waiting events found dispatching network-vif-plugged-f0ba32e1-39c0-4939-aafa-23959e961f93 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1100.632167] env[69992]: WARNING nova.compute.manager [req-b359a5b3-c743-4635-9405-7469f9f43e0b req-95a24b9c-a846-4457-a0b0-908089fb8eef service nova] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Received unexpected event network-vif-plugged-f0ba32e1-39c0-4939-aafa-23959e961f93 for instance with vm_state building and task_state spawning. [ 1100.632347] env[69992]: DEBUG nova.compute.manager [req-b359a5b3-c743-4635-9405-7469f9f43e0b req-95a24b9c-a846-4457-a0b0-908089fb8eef service nova] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Received event network-changed-f0ba32e1-39c0-4939-aafa-23959e961f93 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1100.632515] env[69992]: DEBUG nova.compute.manager [req-b359a5b3-c743-4635-9405-7469f9f43e0b req-95a24b9c-a846-4457-a0b0-908089fb8eef service nova] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Refreshing instance network info cache due to event network-changed-f0ba32e1-39c0-4939-aafa-23959e961f93. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1100.632709] env[69992]: DEBUG oslo_concurrency.lockutils [req-b359a5b3-c743-4635-9405-7469f9f43e0b req-95a24b9c-a846-4457-a0b0-908089fb8eef service nova] Acquiring lock "refresh_cache-32bdb15d-6a4d-4445-9b82-d18b0f6743b6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.632876] env[69992]: DEBUG oslo_concurrency.lockutils [req-b359a5b3-c743-4635-9405-7469f9f43e0b req-95a24b9c-a846-4457-a0b0-908089fb8eef service nova] Acquired lock "refresh_cache-32bdb15d-6a4d-4445-9b82-d18b0f6743b6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1100.633079] env[69992]: DEBUG nova.network.neutron [req-b359a5b3-c743-4635-9405-7469f9f43e0b req-95a24b9c-a846-4457-a0b0-908089fb8eef service nova] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Refreshing network info cache for port f0ba32e1-39c0-4939-aafa-23959e961f93 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1100.661961] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897270, 'name': CreateVM_Task, 'duration_secs': 1.346849} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.662176] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1100.662591] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.662776] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1100.663162] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1100.663443] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7e0d09b-96f3-4426-83d6-37db4f2d4936 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.668628] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for the task: (returnval){ [ 1100.668628] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5242e467-150a-d6a5-32f8-254c47486904" [ 1100.668628] env[69992]: _type = "Task" [ 1100.668628] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.677116] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5242e467-150a-d6a5-32f8-254c47486904, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.948418] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897271, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.971126] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897272, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.066083] env[69992]: DEBUG nova.scheduler.client.report [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1101.178498] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5242e467-150a-d6a5-32f8-254c47486904, 'name': SearchDatastore_Task, 'duration_secs': 0.014549} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.178800] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1101.179049] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1101.179287] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.179435] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1101.179610] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1101.179875] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e17cefe3-f203-48cc-801a-725aa6d6f1dc {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.187522] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1101.187654] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1101.188343] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4572b3f2-9ac0-4222-bb85-b4c45ace3910 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.193118] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for the task: (returnval){ [ 1101.193118] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]523249dd-3eec-a1fa-b684-349194572c06" [ 1101.193118] env[69992]: _type = "Task" [ 1101.193118] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.202623] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523249dd-3eec-a1fa-b684-349194572c06, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.345987] env[69992]: DEBUG nova.network.neutron [req-b359a5b3-c743-4635-9405-7469f9f43e0b req-95a24b9c-a846-4457-a0b0-908089fb8eef service nova] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Updated VIF entry in instance network info cache for port f0ba32e1-39c0-4939-aafa-23959e961f93. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1101.346399] env[69992]: DEBUG nova.network.neutron [req-b359a5b3-c743-4635-9405-7469f9f43e0b req-95a24b9c-a846-4457-a0b0-908089fb8eef service nova] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Updating instance_info_cache with network_info: [{"id": "f0ba32e1-39c0-4939-aafa-23959e961f93", "address": "fa:16:3e:c4:03:44", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0ba32e1-39", "ovs_interfaceid": "f0ba32e1-39c0-4939-aafa-23959e961f93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1101.448837] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897271, 'name': CreateVM_Task, 'duration_secs': 0.538128} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.449070] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1101.450199] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.450199] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1101.450417] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1101.451445] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e7ac878-3f0a-4cf9-ba79-89f8c45e9865 {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.455811] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for the task: (returnval){ [ 1101.455811] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52c1f5a6-ead4-5106-2071-05cd6d84eeaf" [ 1101.455811] env[69992]: _type = "Task" [ 1101.455811] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.472766] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c1f5a6-ead4-5106-2071-05cd6d84eeaf, 'name': SearchDatastore_Task, 'duration_secs': 0.008916} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.475779] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1101.476034] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1101.476265] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.476491] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897272, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.600455} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.476818] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 086ac14d-74bb-4bb6-90b3-3e345b2894a9/086ac14d-74bb-4bb6-90b3-3e345b2894a9.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1101.477081] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1101.477141] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9e0fbe9e-2ba0-4ed0-9d8b-cb442e233a6a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.482880] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1101.482880] env[69992]: value = "task-2897273" [ 1101.482880] env[69992]: _type = "Task" [ 1101.482880] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.492010] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897273, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.572510] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.561s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.572510] env[69992]: INFO nova.compute.manager [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Migrating [ 1101.579390] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.612s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1101.579618] env[69992]: DEBUG nova.objects.instance [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lazy-loading 'resources' on Instance uuid f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1101.704424] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523249dd-3eec-a1fa-b684-349194572c06, 'name': SearchDatastore_Task, 'duration_secs': 0.009108} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.705322] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a34a89e-51e1-4789-9693-fabf7e2733f3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.711155] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for the task: (returnval){ [ 1101.711155] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52653bac-b540-5416-c743-9cca4642e62f" [ 1101.711155] env[69992]: _type = "Task" [ 1101.711155] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.719515] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52653bac-b540-5416-c743-9cca4642e62f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.850305] env[69992]: DEBUG oslo_concurrency.lockutils [req-b359a5b3-c743-4635-9405-7469f9f43e0b req-95a24b9c-a846-4457-a0b0-908089fb8eef service nova] Releasing lock "refresh_cache-32bdb15d-6a4d-4445-9b82-d18b0f6743b6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1102.006456] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897273, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06635} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.006456] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1102.006456] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-211a2f2f-9661-49fe-9137-4d6ae4ae03c9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.029559] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] 086ac14d-74bb-4bb6-90b3-3e345b2894a9/086ac14d-74bb-4bb6-90b3-3e345b2894a9.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1102.030205] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba9a8a78-cbda-4e0a-afb4-d3fd4010fc28 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.051206] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1102.051206] env[69992]: value = "task-2897274" [ 1102.051206] env[69992]: _type = "Task" [ 1102.051206] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.059028] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897274, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.089873] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "refresh_cache-0e8163d9-6ff5-4f1e-af33-ccb42fa46750" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.090075] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquired lock "refresh_cache-0e8163d9-6ff5-4f1e-af33-ccb42fa46750" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.090243] env[69992]: DEBUG nova.network.neutron [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1102.223466] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52653bac-b540-5416-c743-9cca4642e62f, 'name': SearchDatastore_Task, 'duration_secs': 0.00934} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.226253] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1102.226533] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 7932a42f-6a62-4c2c-be9a-3cb518fe4183/7932a42f-6a62-4c2c-be9a-3cb518fe4183.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1102.227122] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.227323] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1102.227549] env[69992]: DEBUG oslo_vmware.service [-] Invoking 
VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77052c4e-1353-438c-93e9-e14e4fdd3f6e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.229651] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4a3f76b7-7f54-4f28-8278-f2769815247b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.240908] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for the task: (returnval){ [ 1102.240908] env[69992]: value = "task-2897275" [ 1102.240908] env[69992]: _type = "Task" [ 1102.240908] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.241691] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1102.241854] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1102.248080] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34fdeb0d-fc86-4b6b-9c8d-39fd6645b84f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.255903] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897275, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.257800] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for the task: (returnval){ [ 1102.257800] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52bf346b-60e7-9664-685b-59a65d6f2748" [ 1102.257800] env[69992]: _type = "Task" [ 1102.257800] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.265512] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52bf346b-60e7-9664-685b-59a65d6f2748, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.567155] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897274, 'name': ReconfigVM_Task, 'duration_secs': 0.297712} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.567739] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Reconfigured VM instance instance-0000003b to attach disk [datastore2] 086ac14d-74bb-4bb6-90b3-3e345b2894a9/086ac14d-74bb-4bb6-90b3-3e345b2894a9.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1102.570018] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-623ca6eb-a720-41f4-ab47-79371b43b044 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.578019] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1102.578019] env[69992]: value = "task-2897276" [ 1102.578019] env[69992]: _type = "Task" [ 1102.578019] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.590439] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897276, 'name': Rename_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.705824] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1466cd3b-5af3-42cd-923d-be7a992ec326 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.713227] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-177abdcf-4eb9-4036-9a15-a931695fd75f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.750985] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7c6e38-6523-49f8-ad53-e2513d2ef147 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.765071] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396c950b-42b4-4b1d-9187-6e77661cae47 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.771107] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897275, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.784271] env[69992]: DEBUG nova.compute.provider_tree [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1102.792988] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52bf346b-60e7-9664-685b-59a65d6f2748, 'name': SearchDatastore_Task, 'duration_secs': 0.018629} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.794969] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8adee527-9896-4daf-b019-fe4342cbd962 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.801249] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for the task: (returnval){ [ 1102.801249] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]529dfccd-2c4a-9885-a1be-a0493b536629" [ 1102.801249] env[69992]: _type = "Task" [ 1102.801249] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.812517] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]529dfccd-2c4a-9885-a1be-a0493b536629, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.970604] env[69992]: DEBUG nova.network.neutron [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Updating instance_info_cache with network_info: [{"id": "47e06987-ed7c-4f19-8716-20716e1056c3", "address": "fa:16:3e:7b:f9:7a", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47e06987-ed", "ovs_interfaceid": "47e06987-ed7c-4f19-8716-20716e1056c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.094030] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897276, 'name': Rename_Task, 'duration_secs': 0.328566} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.094030] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1103.094623] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b055a6da-6dd1-4dc8-843d-f8f790a71114 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.102645] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1103.102645] env[69992]: value = "task-2897277" [ 1103.102645] env[69992]: _type = "Task" [ 1103.102645] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.112281] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897277, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.258068] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897275, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.593544} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.258362] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 7932a42f-6a62-4c2c-be9a-3cb518fe4183/7932a42f-6a62-4c2c-be9a-3cb518fe4183.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1103.258577] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1103.258839] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a7084811-dbf8-4c5e-acfa-8b45ae538b37 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.265730] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for the task: (returnval){ [ 1103.265730] env[69992]: value = "task-2897278" [ 1103.265730] env[69992]: _type = "Task" [ 1103.265730] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.274164] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897278, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.317017] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]529dfccd-2c4a-9885-a1be-a0493b536629, 'name': SearchDatastore_Task, 'duration_secs': 0.018158} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.317017] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1103.317017] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 32bdb15d-6a4d-4445-9b82-d18b0f6743b6/32bdb15d-6a4d-4445-9b82-d18b0f6743b6.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1103.317017] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-941d52ae-d701-46e0-ae7f-2a5a1908c8e2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.321054] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for the task: (returnval){ [ 1103.321054] env[69992]: value = "task-2897279" [ 1103.321054] env[69992]: _type = "Task" [ 1103.321054] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.333784] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897279, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.339779] env[69992]: DEBUG nova.scheduler.client.report [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 96 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1103.340074] env[69992]: DEBUG nova.compute.provider_tree [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 96 to 97 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1103.340261] env[69992]: DEBUG nova.compute.provider_tree [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1103.475117] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Releasing lock "refresh_cache-0e8163d9-6ff5-4f1e-af33-ccb42fa46750" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1103.620858] env[69992]: DEBUG oslo_vmware.api [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897277, 'name': PowerOnVM_Task, 'duration_secs': 0.508258} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.621201] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1103.621460] env[69992]: INFO nova.compute.manager [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Took 7.43 seconds to spawn the instance on the hypervisor. 
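Editor's note on the spawn sequence recorded above: the instance is built by locating the cached image with SearchDatastore_Task, copying it out of devstack-image-cache_base with CopyVirtualDisk_Task, growing the root disk with ExtendVirtualDisk_Task, attaching it via ReconfigVM_Task, renaming the VM, and finally powering it on with PowerOnVM_Task ("Took 7.43 seconds to spawn the instance on the hypervisor"). Every one of these steps is an asynchronous vCenter task that Nova drives through oslo.vmware, which is what produces the repeated "progress is N%" / "completed successfully" poll lines. The sketch below shows that invoke-and-poll pattern in isolation. It is illustrative only and is not the driver's actual code path: the host, credentials, and hard-coded managed-object id are placeholders, not values from this deployment.

    # Minimal sketch of the oslo.vmware task pattern seen in the log:
    # issue an asynchronous "*_Task" call, then poll it to completion.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    def power_on(session, vm_ref):
        # PowerOnVM_Task is asynchronous: invoke_api() returns a Task moref.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task every task_poll_interval seconds
        # (the "progress is N%" lines above) and returns the task result,
        # raising if the task ends in error.
        return session.wait_for_task(task)

    if __name__ == '__main__':
        # Placeholder connection values; a real deployment takes these
        # from nova.conf [vmware] host_ip / host_username / host_password.
        session = vmware_api.VMwareAPISession(
            'vcenter.example.test', 'administrator', 'secret',
            api_retry_count=10, task_poll_interval=0.5)
        # The VM reference would normally come from a PropertyCollector
        # lookup (the RetrievePropertiesEx calls in the log); a made-up
        # moref id is used here purely for illustration.
        vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
        print(power_on(session, vm_ref))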
[ 1103.621711] env[69992]: DEBUG nova.compute.manager [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1103.623108] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2380d662-b266-42ab-ade8-b0bda4e5d9c0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.777484] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897278, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08009} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.777755] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1103.778530] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c73842b-74c7-47fa-8d09-fd97e633a179 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.797541] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] 7932a42f-6a62-4c2c-be9a-3cb518fe4183/7932a42f-6a62-4c2c-be9a-3cb518fe4183.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1103.798149] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cfdc5578-0947-49a8-b60d-a8ecbbf58ed0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.816764] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for the task: (returnval){ [ 1103.816764] env[69992]: value = "task-2897280" [ 1103.816764] env[69992]: _type = "Task" [ 1103.816764] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.827159] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897280, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.832019] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897279, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.847096] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.268s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.849519] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.946s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.849805] env[69992]: DEBUG nova.objects.instance [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lazy-loading 'resources' on Instance uuid 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1103.868907] env[69992]: INFO nova.scheduler.client.report [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Deleted allocations for instance f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75 [ 1104.151955] env[69992]: INFO nova.compute.manager [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Took 55.54 seconds to build instance. [ 1104.327780] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897280, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.332779] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897279, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.375737] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7426a6a6-7b4b-45be-ae6a-ad0999e91ee4 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.921s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.654347] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9e55575-f2f8-4770-a53b-96777722aaf0 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "086ac14d-74bb-4bb6-90b3-3e345b2894a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.141s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.836412] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897280, 'name': ReconfigVM_Task, 'duration_secs': 0.674748} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.837022] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897279, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.837022] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Reconfigured VM instance instance-00000039 to attach disk [datastore2] 7932a42f-6a62-4c2c-be9a-3cb518fe4183/7932a42f-6a62-4c2c-be9a-3cb518fe4183.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1104.837603] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2aad8234-6730-40df-b2f4-69a371086859 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.846561] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for the task: (returnval){ [ 1104.846561] env[69992]: value = "task-2897281" [ 1104.846561] env[69992]: _type = "Task" [ 1104.846561] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.855917] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897281, 'name': Rename_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.862121] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c1132c-94ac-4b67-adc6-7fa1bcc4ac72 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.868895] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f51770b-ff85-448c-a113-02485128ba4c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.899802] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2eee910-d9f7-4584-bbbb-b55208abc590 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.907072] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a6f3df-df56-42fc-807a-27f6b97d7c68 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.920316] env[69992]: DEBUG nova.compute.provider_tree [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1104.994433] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d93c9b-a6a7-470b-a58e-65222bb1c214 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.014114] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Updating instance '0e8163d9-6ff5-4f1e-af33-ccb42fa46750' progress to 0 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1105.174057] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "131096fc-addf-4d9a-9cd7-4abe98aabd1f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.174625] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "131096fc-addf-4d9a-9cd7-4abe98aabd1f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.174701] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "131096fc-addf-4d9a-9cd7-4abe98aabd1f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.174965] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "131096fc-addf-4d9a-9cd7-4abe98aabd1f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.175235] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "131096fc-addf-4d9a-9cd7-4abe98aabd1f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1105.178124] env[69992]: INFO nova.compute.manager [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Terminating instance [ 1105.253355] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "086ac14d-74bb-4bb6-90b3-3e345b2894a9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.253628] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "086ac14d-74bb-4bb6-90b3-3e345b2894a9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.253835] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "086ac14d-74bb-4bb6-90b3-3e345b2894a9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.254040] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "086ac14d-74bb-4bb6-90b3-3e345b2894a9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.254225] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "086ac14d-74bb-4bb6-90b3-3e345b2894a9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1105.256740] env[69992]: INFO nova.compute.manager [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Terminating instance [ 1105.333448] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897279, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.985834} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.333741] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 32bdb15d-6a4d-4445-9b82-d18b0f6743b6/32bdb15d-6a4d-4445-9b82-d18b0f6743b6.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1105.333955] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1105.334224] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-449f8c14-2697-4040-b633-3dcbe3eac1a8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.340230] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for the task: (returnval){ [ 1105.340230] env[69992]: value = "task-2897282" [ 1105.340230] env[69992]: _type = "Task" [ 1105.340230] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.347886] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897282, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.356656] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897281, 'name': Rename_Task, 'duration_secs': 0.140603} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.356963] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1105.357219] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d31783f-854b-4e7b-b4bc-5a12dea3f580 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.362714] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for the task: (returnval){ [ 1105.362714] env[69992]: value = "task-2897283" [ 1105.362714] env[69992]: _type = "Task" [ 1105.362714] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.370181] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897283, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.424456] env[69992]: DEBUG nova.scheduler.client.report [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1105.520201] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1105.520658] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6317092a-55df-4884-9554-75b49ab692c3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.528498] env[69992]: DEBUG oslo_vmware.api [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1105.528498] env[69992]: value = "task-2897284" [ 1105.528498] env[69992]: _type = "Task" [ 1105.528498] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.536661] env[69992]: DEBUG oslo_vmware.api [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897284, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.638032] env[69992]: DEBUG oslo_concurrency.lockutils [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquiring lock "4e93b655-aaf4-49b8-bbb2-92287ec15bbc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.638279] env[69992]: DEBUG oslo_concurrency.lockutils [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lock "4e93b655-aaf4-49b8-bbb2-92287ec15bbc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.685933] env[69992]: DEBUG nova.compute.manager [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1105.686172] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1105.688299] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e06c03-7c8c-4c2e-a7de-071a1a9d3a61 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.696527] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1105.696865] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d66082b-6105-4c6d-9728-c5fd12df3f8c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.707451] env[69992]: DEBUG oslo_vmware.api [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1105.707451] env[69992]: value = "task-2897285" [ 1105.707451] env[69992]: _type = "Task" [ 1105.707451] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.717228] env[69992]: DEBUG oslo_vmware.api [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897285, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.760230] env[69992]: DEBUG nova.compute.manager [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1105.760466] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1105.761407] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd22671-f462-47b8-ade2-cd9c32d3f364 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.769560] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1105.769838] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb7d51a3-bea7-4c5e-87be-c34b74459289 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.776418] env[69992]: DEBUG oslo_vmware.api [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1105.776418] env[69992]: value = "task-2897286" [ 1105.776418] env[69992]: _type = "Task" [ 1105.776418] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.784983] env[69992]: DEBUG oslo_vmware.api [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897286, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.851910] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897282, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074259} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.852418] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1105.853756] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56bd23c3-0386-4a50-a0b6-f5871c9af05f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.883037] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] 32bdb15d-6a4d-4445-9b82-d18b0f6743b6/32bdb15d-6a4d-4445-9b82-d18b0f6743b6.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1105.886502] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3dc60fab-fd11-4726-ab0e-8c8776feb284 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.906398] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897283, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.908449] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for the task: (returnval){ [ 1105.908449] env[69992]: value = "task-2897287" [ 1105.908449] env[69992]: _type = "Task" [ 1105.908449] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.916782] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897287, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.929854] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.080s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1105.933937] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.666s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.935528] env[69992]: INFO nova.compute.claims [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1105.941243] env[69992]: DEBUG oslo_concurrency.lockutils [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquiring lock "c1c90aa6-922d-4315-8ead-2263a55a5d6e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.941486] env[69992]: DEBUG oslo_concurrency.lockutils [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lock "c1c90aa6-922d-4315-8ead-2263a55a5d6e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.957361] env[69992]: INFO nova.scheduler.client.report [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Deleted allocations for instance 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4 [ 1106.037976] env[69992]: DEBUG oslo_vmware.api [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897284, 'name': PowerOffVM_Task, 'duration_secs': 0.211859} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.038273] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1106.038471] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Updating instance '0e8163d9-6ff5-4f1e-af33-ccb42fa46750' progress to 17 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1106.140477] env[69992]: DEBUG nova.compute.manager [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1106.216780] env[69992]: DEBUG oslo_vmware.api [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897285, 'name': PowerOffVM_Task, 'duration_secs': 0.31821} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.217054] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1106.217268] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1106.217547] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7379bf3b-91f9-4a0a-b611-e641a13a9548 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.286309] env[69992]: DEBUG oslo_vmware.api [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897286, 'name': PowerOffVM_Task, 'duration_secs': 0.233886} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.286593] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1106.286818] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1106.287092] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-21fed478-b5a2-4acf-947a-d231d20eb2be {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.299808] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1106.300069] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1106.302031] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Deleting the datastore file [datastore2] 131096fc-addf-4d9a-9cd7-4abe98aabd1f {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1106.302031] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-33b85174-0e09-416d-a3d3-0e27f04da8f4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.309089] env[69992]: DEBUG oslo_vmware.api [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1106.309089] env[69992]: value = "task-2897290" [ 1106.309089] env[69992]: _type = "Task" [ 1106.309089] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.316074] env[69992]: DEBUG oslo_vmware.api [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897290, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.357196] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1106.357196] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1106.357196] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Deleting the datastore file [datastore2] 086ac14d-74bb-4bb6-90b3-3e345b2894a9 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1106.357365] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b5a3bb7d-496e-4c06-950c-c3635a82de95 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.364016] env[69992]: DEBUG oslo_vmware.api [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1106.364016] env[69992]: value = "task-2897291" [ 1106.364016] env[69992]: _type = "Task" [ 1106.364016] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.374105] env[69992]: DEBUG oslo_vmware.api [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897291, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.385941] env[69992]: DEBUG oslo_vmware.api [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897283, 'name': PowerOnVM_Task, 'duration_secs': 0.598543} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.386295] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1106.386549] env[69992]: DEBUG nova.compute.manager [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1106.387403] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8b2cce-1abc-45fe-a681-5a958e4a6314 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.418460] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897287, 'name': ReconfigVM_Task, 'duration_secs': 0.312583} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.418761] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Reconfigured VM instance instance-0000003c to attach disk [datastore2] 32bdb15d-6a4d-4445-9b82-d18b0f6743b6/32bdb15d-6a4d-4445-9b82-d18b0f6743b6.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1106.419495] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-820c7c4d-7335-4b88-a1b3-6f60edc06854 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.425343] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for the task: (returnval){ [ 1106.425343] env[69992]: value = "task-2897292" [ 1106.425343] env[69992]: _type = "Task" [ 1106.425343] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.433193] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897292, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.445297] env[69992]: DEBUG nova.compute.manager [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1106.471218] env[69992]: DEBUG oslo_concurrency.lockutils [None req-77886f08-f034-42b3-90fe-b1db4d0482d9 tempest-ListServersNegativeTestJSON-1216494605 tempest-ListServersNegativeTestJSON-1216494605-project-member] Lock "40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.080s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.544499] env[69992]: DEBUG nova.virt.hardware [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1106.544801] env[69992]: DEBUG nova.virt.hardware [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1106.545988] env[69992]: DEBUG nova.virt.hardware [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1106.545988] env[69992]: DEBUG nova.virt.hardware [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1106.545988] env[69992]: DEBUG nova.virt.hardware [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1106.545988] env[69992]: DEBUG nova.virt.hardware [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1106.545988] env[69992]: DEBUG nova.virt.hardware [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1106.546342] env[69992]: DEBUG nova.virt.hardware [None req-87d65179-7787-48d2-b947-caf773ff5f9a 
tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1106.546342] env[69992]: DEBUG nova.virt.hardware [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1106.546485] env[69992]: DEBUG nova.virt.hardware [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1106.546721] env[69992]: DEBUG nova.virt.hardware [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1106.554064] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9cb3761f-709f-4979-8ca1-59aaa7740b37 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.569703] env[69992]: DEBUG oslo_vmware.api [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1106.569703] env[69992]: value = "task-2897293" [ 1106.569703] env[69992]: _type = "Task" [ 1106.569703] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.577578] env[69992]: DEBUG oslo_vmware.api [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897293, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.661871] env[69992]: DEBUG oslo_concurrency.lockutils [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1106.818204] env[69992]: DEBUG oslo_vmware.api [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897290, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166837} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.818491] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1106.818670] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1106.818849] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1106.819032] env[69992]: INFO nova.compute.manager [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1106.819272] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1106.819457] env[69992]: DEBUG nova.compute.manager [-] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1106.819551] env[69992]: DEBUG nova.network.neutron [-] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1106.872738] env[69992]: DEBUG oslo_vmware.api [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897291, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15142} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.872986] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1106.873191] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1106.873367] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1106.873540] env[69992]: INFO nova.compute.manager [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1106.873775] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1106.873967] env[69992]: DEBUG nova.compute.manager [-] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1106.874071] env[69992]: DEBUG nova.network.neutron [-] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1106.907345] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1106.934685] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897292, 'name': Rename_Task, 'duration_secs': 0.158758} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.934956] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1106.935227] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1b7493aa-335d-4a56-8b8f-564dddd9459a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.941951] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for the task: (returnval){ [ 1106.941951] env[69992]: value = "task-2897294" [ 1106.941951] env[69992]: _type = "Task" [ 1106.941951] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.957377] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897294, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.972098] env[69992]: DEBUG oslo_concurrency.lockutils [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.090387] env[69992]: DEBUG oslo_vmware.api [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897293, 'name': ReconfigVM_Task, 'duration_secs': 0.420174} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.090387] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Updating instance '0e8163d9-6ff5-4f1e-af33-ccb42fa46750' progress to 33 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1107.299391] env[69992]: INFO nova.compute.manager [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Rebuilding instance [ 1107.341916] env[69992]: DEBUG nova.compute.manager [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1107.342830] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09254550-63da-4bc8-bf90-d707b716c43d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.386774] env[69992]: DEBUG nova.compute.manager [req-bc60d772-3907-4fb1-a1b8-948bde32739a req-ba4e5280-9f65-43c0-b7ce-1a7f6c20583b service nova] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Received event network-vif-deleted-ae2c507f-1ebf-4321-a3b8-bd98e024886f {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1107.387020] env[69992]: INFO nova.compute.manager [req-bc60d772-3907-4fb1-a1b8-948bde32739a req-ba4e5280-9f65-43c0-b7ce-1a7f6c20583b service nova] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Neutron deleted interface ae2c507f-1ebf-4321-a3b8-bd98e024886f; detaching it from the instance and deleting it from the info cache [ 1107.387178] env[69992]: DEBUG nova.network.neutron [req-bc60d772-3907-4fb1-a1b8-948bde32739a req-ba4e5280-9f65-43c0-b7ce-1a7f6c20583b service nova] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.452627] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897294, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.456346] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a23c80b5-8773-4e8a-a22f-fc6e90cbcab1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.463136] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c18d85-42e5-43a8-ba45-7d94edb73342 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.497885] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee8d5d8-131e-46a0-9a5f-9487798348af {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.508308] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a0d668-e852-41fc-9511-c69e4fb676cf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.522583] env[69992]: DEBUG nova.compute.provider_tree [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1107.597092] env[69992]: DEBUG nova.virt.hardware [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:47:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='b7d7a3d8-e1c4-4412-993d-af11150bffcc',id=35,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-318102732',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1107.597373] env[69992]: DEBUG nova.virt.hardware [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1107.597549] env[69992]: DEBUG nova.virt.hardware [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1107.597737] env[69992]: DEBUG nova.virt.hardware [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1107.597904] env[69992]: DEBUG nova.virt.hardware [None req-87d65179-7787-48d2-b947-caf773ff5f9a 
tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1107.598130] env[69992]: DEBUG nova.virt.hardware [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1107.598352] env[69992]: DEBUG nova.virt.hardware [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1107.598545] env[69992]: DEBUG nova.virt.hardware [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1107.598727] env[69992]: DEBUG nova.virt.hardware [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1107.599260] env[69992]: DEBUG nova.virt.hardware [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1107.599260] env[69992]: DEBUG nova.virt.hardware [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1107.604744] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Reconfiguring VM instance instance-00000032 to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1107.605428] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-272f9439-eea9-479a-be0e-a4e2cad55a35 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.626682] env[69992]: DEBUG oslo_vmware.api [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1107.626682] env[69992]: value = "task-2897295" [ 1107.626682] env[69992]: _type = "Task" [ 1107.626682] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.638643] env[69992]: DEBUG oslo_vmware.api [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897295, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.674039] env[69992]: DEBUG nova.network.neutron [-] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.892708] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2d41a9ca-96b6-4f80-b139-2551e6513d0f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.902105] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9a13af-c716-4351-8d0e-296956f02922 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.923286] env[69992]: DEBUG nova.compute.manager [req-25477ac6-b000-482b-ad6a-812e9d426b9a req-044595dc-3d6d-4fdd-93dc-8651ed8ae3ef service nova] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Received event network-vif-deleted-64c65906-7b18-4487-a141-432f0ac29177 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1107.923286] env[69992]: INFO nova.compute.manager [req-25477ac6-b000-482b-ad6a-812e9d426b9a req-044595dc-3d6d-4fdd-93dc-8651ed8ae3ef service nova] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Neutron deleted interface 64c65906-7b18-4487-a141-432f0ac29177; detaching it from the instance and deleting it from the info cache [ 1107.923286] env[69992]: DEBUG nova.network.neutron [req-25477ac6-b000-482b-ad6a-812e9d426b9a req-044595dc-3d6d-4fdd-93dc-8651ed8ae3ef service nova] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.943440] env[69992]: DEBUG nova.compute.manager [req-bc60d772-3907-4fb1-a1b8-948bde32739a req-ba4e5280-9f65-43c0-b7ce-1a7f6c20583b service nova] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Detach interface failed, port_id=ae2c507f-1ebf-4321-a3b8-bd98e024886f, reason: Instance 086ac14d-74bb-4bb6-90b3-3e345b2894a9 could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1107.953774] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897294, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.028021] env[69992]: DEBUG nova.scheduler.client.report [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1108.123222] env[69992]: DEBUG nova.network.neutron [-] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.136861] env[69992]: DEBUG oslo_vmware.api [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897295, 'name': ReconfigVM_Task, 'duration_secs': 0.251213} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.137973] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Reconfigured VM instance instance-00000032 to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1108.139024] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4481f2de-d326-4ac6-97db-1b7fb0fee447 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.165177] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] 0e8163d9-6ff5-4f1e-af33-ccb42fa46750/0e8163d9-6ff5-4f1e-af33-ccb42fa46750.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1108.166246] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5fc1a1e8-7ca9-490b-9695-33e3c9480f0b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.179401] env[69992]: INFO nova.compute.manager [-] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Took 1.31 seconds to deallocate network for instance. [ 1108.189589] env[69992]: DEBUG oslo_vmware.api [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1108.189589] env[69992]: value = "task-2897296" [ 1108.189589] env[69992]: _type = "Task" [ 1108.189589] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.200699] env[69992]: DEBUG oslo_vmware.api [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897296, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.361278] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1108.361634] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52807241-f11a-4f47-951c-1915c37f43b4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.369377] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Waiting for the task: (returnval){ [ 1108.369377] env[69992]: value = "task-2897297" [ 1108.369377] env[69992]: _type = "Task" [ 1108.369377] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.377577] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Task: {'id': task-2897297, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.425601] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3ce6b3c9-5148-414f-86c9-ece5e7d5ba1f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.434967] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-857bcb8b-dc99-4f2a-a791-fe9899828305 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.453794] env[69992]: DEBUG oslo_vmware.api [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897294, 'name': PowerOnVM_Task, 'duration_secs': 1.063694} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.454202] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1108.454455] env[69992]: INFO nova.compute.manager [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Took 9.65 seconds to spawn the instance on the hypervisor. 
[ 1108.454685] env[69992]: DEBUG nova.compute.manager [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1108.455537] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6913a31c-5542-4452-a478-2bcb396f6f23 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.478370] env[69992]: DEBUG nova.compute.manager [req-25477ac6-b000-482b-ad6a-812e9d426b9a req-044595dc-3d6d-4fdd-93dc-8651ed8ae3ef service nova] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Detach interface failed, port_id=64c65906-7b18-4487-a141-432f0ac29177, reason: Instance 131096fc-addf-4d9a-9cd7-4abe98aabd1f could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1108.530588] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.597s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.531127] env[69992]: DEBUG nova.compute.manager [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1108.534749] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 43.192s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.536408] env[69992]: INFO nova.compute.claims [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1108.625855] env[69992]: INFO nova.compute.manager [-] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Took 1.81 seconds to deallocate network for instance. [ 1108.691955] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.701663] env[69992]: DEBUG oslo_vmware.api [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897296, 'name': ReconfigVM_Task, 'duration_secs': 0.377184} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.703250] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Reconfigured VM instance instance-00000032 to attach disk [datastore2] 0e8163d9-6ff5-4f1e-af33-ccb42fa46750/0e8163d9-6ff5-4f1e-af33-ccb42fa46750.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1108.703250] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Updating instance '0e8163d9-6ff5-4f1e-af33-ccb42fa46750' progress to 50 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1108.880999] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Task: {'id': task-2897297, 'name': PowerOffVM_Task, 'duration_secs': 0.112106} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.881281] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1108.881534] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1108.882329] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc57d6b2-2475-496c-a4bc-abd9b696bd51 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.889999] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1108.890914] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d374ffec-75db-4f66-ab3c-82c80b850ad6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.918148] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1108.918148] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] 
[instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1108.918148] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Deleting the datastore file [datastore2] 7932a42f-6a62-4c2c-be9a-3cb518fe4183 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1108.918148] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-66af95bf-0ec2-4dd4-a67b-6e3a261a5d6c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.928150] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Waiting for the task: (returnval){ [ 1108.928150] env[69992]: value = "task-2897299" [ 1108.928150] env[69992]: _type = "Task" [ 1108.928150] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.947208] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Task: {'id': task-2897299, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.986263] env[69992]: INFO nova.compute.manager [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Took 55.86 seconds to build instance. [ 1109.042390] env[69992]: DEBUG nova.compute.utils [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1109.046200] env[69992]: DEBUG nova.compute.manager [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1109.046409] env[69992]: DEBUG nova.network.neutron [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1109.119025] env[69992]: DEBUG nova.policy [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '044902c6075d41739188628ba5ebd58d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b51b8195c4e7418cbdaa66aa5e5aff5b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1109.137870] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1109.210381] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6a2e5a-1da6-4afd-909b-4b0575f06297 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.231307] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82aaa824-553e-4041-82fd-0df4772f8823 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.251595] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Updating instance '0e8163d9-6ff5-4f1e-af33-ccb42fa46750' progress to 67 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1109.369169] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a00045-1d8c-4034-bf1c-7880b0f7a211 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.376278] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-460424f4-2841-41cb-97e6-de65e440b532 tempest-ServersAdminNegativeTestJSON-1598336459 tempest-ServersAdminNegativeTestJSON-1598336459-project-admin] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Suspending the VM {{(pid=69992) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1109.376523] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-31e6f833-d655-47d7-8b23-2fbfcdc1c422 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.382725] env[69992]: DEBUG oslo_vmware.api [None req-460424f4-2841-41cb-97e6-de65e440b532 
tempest-ServersAdminNegativeTestJSON-1598336459 tempest-ServersAdminNegativeTestJSON-1598336459-project-admin] Waiting for the task: (returnval){ [ 1109.382725] env[69992]: value = "task-2897300" [ 1109.382725] env[69992]: _type = "Task" [ 1109.382725] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.394578] env[69992]: DEBUG oslo_vmware.api [None req-460424f4-2841-41cb-97e6-de65e440b532 tempest-ServersAdminNegativeTestJSON-1598336459 tempest-ServersAdminNegativeTestJSON-1598336459-project-admin] Task: {'id': task-2897300, 'name': SuspendVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.441652] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Task: {'id': task-2897299, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126554} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.442284] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1109.442659] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1109.443032] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1109.489515] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e6342065-f8e9-4655-9962-5bf691cb36c6 tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Lock "32bdb15d-6a4d-4445-9b82-d18b0f6743b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.197s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1109.525282] env[69992]: DEBUG nova.network.neutron [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Successfully created port: fd0c5f07-29de-4e64-a60c-655c3da4bb9e {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1109.546752] env[69992]: DEBUG nova.compute.manager [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1109.817399] env[69992]: DEBUG nova.network.neutron [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Port 47e06987-ed7c-4f19-8716-20716e1056c3 binding to destination host cpu-1 is already ACTIVE {{(pid=69992) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1109.893438] env[69992]: DEBUG oslo_vmware.api [None req-460424f4-2841-41cb-97e6-de65e440b532 tempest-ServersAdminNegativeTestJSON-1598336459 tempest-ServersAdminNegativeTestJSON-1598336459-project-admin] Task: {'id': task-2897300, 'name': SuspendVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.133675] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7320b94e-f400-4cd6-8f00-b8e9e0ff9fa7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.142144] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c8748d2-21a4-4f63-852b-d2f5ec4edac0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.172552] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c24f8815-aed4-4628-a822-5de49ad1b2f6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.180291] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c97923f-fade-40d2-acdc-28527361bd8f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.194955] env[69992]: DEBUG nova.compute.provider_tree [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1110.394635] env[69992]: DEBUG oslo_vmware.api [None req-460424f4-2841-41cb-97e6-de65e440b532 tempest-ServersAdminNegativeTestJSON-1598336459 tempest-ServersAdminNegativeTestJSON-1598336459-project-admin] Task: {'id': task-2897300, 'name': SuspendVM_Task, 'duration_secs': 0.623302} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.394635] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-460424f4-2841-41cb-97e6-de65e440b532 tempest-ServersAdminNegativeTestJSON-1598336459 tempest-ServersAdminNegativeTestJSON-1598336459-project-admin] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Suspended the VM {{(pid=69992) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1110.394635] env[69992]: DEBUG nova.compute.manager [None req-460424f4-2841-41cb-97e6-de65e440b532 tempest-ServersAdminNegativeTestJSON-1598336459 tempest-ServersAdminNegativeTestJSON-1598336459-project-admin] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1110.395022] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d00e48-e7d2-4460-9875-5fb55e9f1a95 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.481299] env[69992]: DEBUG nova.virt.hardware [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1110.481595] env[69992]: DEBUG nova.virt.hardware [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1110.481709] env[69992]: DEBUG nova.virt.hardware [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1110.481886] env[69992]: DEBUG nova.virt.hardware [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1110.482255] env[69992]: DEBUG nova.virt.hardware [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1110.482449] env[69992]: DEBUG nova.virt.hardware [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1110.482722] env[69992]: DEBUG nova.virt.hardware [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1110.482858] env[69992]: DEBUG nova.virt.hardware [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1110.483039] env[69992]: DEBUG nova.virt.hardware [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1110.483207] env[69992]: DEBUG nova.virt.hardware [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1110.483382] env[69992]: DEBUG nova.virt.hardware [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1110.484249] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb6fcdf-f9c2-446e-b9a7-3346c67912a0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.492330] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d43f5c-7a1b-48f7-913e-61a1f7357efa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.506050] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Instance VIF info [] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1110.512126] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1110.512679] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1110.512901] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f2819a7-ccfb-4520-85ec-3053191fb259 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.530670] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1110.530670] env[69992]: value = "task-2897301" [ 1110.530670] env[69992]: _type = "Task" [ 1110.530670] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.538804] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897301, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.559130] env[69992]: DEBUG nova.compute.manager [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1110.585853] env[69992]: DEBUG nova.virt.hardware [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1110.587021] env[69992]: DEBUG nova.virt.hardware [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1110.587021] env[69992]: DEBUG nova.virt.hardware [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1110.587021] env[69992]: DEBUG nova.virt.hardware [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1110.587021] 
env[69992]: DEBUG nova.virt.hardware [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1110.587249] env[69992]: DEBUG nova.virt.hardware [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1110.587563] env[69992]: DEBUG nova.virt.hardware [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1110.587746] env[69992]: DEBUG nova.virt.hardware [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1110.587922] env[69992]: DEBUG nova.virt.hardware [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1110.588120] env[69992]: DEBUG nova.virt.hardware [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1110.588307] env[69992]: DEBUG nova.virt.hardware [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1110.589216] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25109ed-4652-4aac-87e1-62ab7958fedf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.597684] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b690b6-158e-441b-94d2-c77b5d1e94d7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.700190] env[69992]: DEBUG nova.scheduler.client.report [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1110.841157] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "0e8163d9-6ff5-4f1e-af33-ccb42fa46750-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1110.841394] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "0e8163d9-6ff5-4f1e-af33-ccb42fa46750-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.841617] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "0e8163d9-6ff5-4f1e-af33-ccb42fa46750-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.040567] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897301, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.205123] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.670s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.205123] env[69992]: DEBUG nova.compute.manager [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1111.207861] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.131s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.209016] env[69992]: DEBUG nova.objects.instance [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Lazy-loading 'resources' on Instance uuid 06442c68-7dc6-46a1-9e35-34a62730a555 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1111.231898] env[69992]: DEBUG nova.compute.manager [req-c8c94cc9-b59c-4a2f-bdf8-69a960f8d4ee req-c421fce6-8006-480f-88ef-681052b3589e service nova] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Received event network-vif-plugged-fd0c5f07-29de-4e64-a60c-655c3da4bb9e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1111.232148] env[69992]: DEBUG oslo_concurrency.lockutils [req-c8c94cc9-b59c-4a2f-bdf8-69a960f8d4ee req-c421fce6-8006-480f-88ef-681052b3589e service nova] Acquiring lock "9df7b187-e579-41b0-9d24-be2a1ae93079-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.232361] env[69992]: DEBUG oslo_concurrency.lockutils [req-c8c94cc9-b59c-4a2f-bdf8-69a960f8d4ee req-c421fce6-8006-480f-88ef-681052b3589e service nova] Lock "9df7b187-e579-41b0-9d24-be2a1ae93079-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.232530] env[69992]: DEBUG oslo_concurrency.lockutils [req-c8c94cc9-b59c-4a2f-bdf8-69a960f8d4ee req-c421fce6-8006-480f-88ef-681052b3589e service nova] Lock "9df7b187-e579-41b0-9d24-be2a1ae93079-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.232704] env[69992]: DEBUG nova.compute.manager [req-c8c94cc9-b59c-4a2f-bdf8-69a960f8d4ee req-c421fce6-8006-480f-88ef-681052b3589e service nova] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] No waiting events found dispatching network-vif-plugged-fd0c5f07-29de-4e64-a60c-655c3da4bb9e {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1111.232865] env[69992]: WARNING nova.compute.manager [req-c8c94cc9-b59c-4a2f-bdf8-69a960f8d4ee req-c421fce6-8006-480f-88ef-681052b3589e service nova] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Received unexpected event network-vif-plugged-fd0c5f07-29de-4e64-a60c-655c3da4bb9e for instance with vm_state building and task_state spawning. 
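The three "9df7b187-...-events" lock messages and the WARNING just above come from Nova's external-event plumbing: Neutron reports network-vif-plugged-fd0c5f07-... while the instance is still spawning, the compute manager pops a waiter for that event under the per-instance events lock, finds none registered yet, and logs the event as unexpected. The snippet below is not Nova's code; it is only a minimal sketch, under that reading of the log, of the register-then-dispatch pattern being exercised (class and method names here are illustrative, not Nova identifiers).

# Minimal sketch (not Nova's implementation) of the per-instance event
# bookkeeping the log records: serialize on a "<uuid>-events"-style lock,
# pop the waiter for the incoming event, and warn when none was registered
# because the event raced ahead of the spawn path.
import threading
from collections import defaultdict

class InstanceEventsSketch:
    def __init__(self):
        self._lock = threading.Lock()        # stands in for the "<uuid>-events" lock
        self._waiters = defaultdict(dict)    # instance_uuid -> {event_name: threading.Event}

    def prepare_for_event(self, instance_uuid, event_name):
        """Spawn path registers interest before triggering the action (e.g. plugging a VIF)."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = waiter
        return waiter

    def dispatch(self, instance_uuid, event_name):
        """External event (e.g. from Neutron) arrives for the instance."""
        with self._lock:
            waiter = self._waiters[instance_uuid].pop(event_name, None)
        if waiter is None:
            # Mirrors "No waiting events found dispatching network-vif-plugged-..."
            # followed by the WARNING about an unexpected event while spawning.
            print(f"WARNING: unexpected event {event_name} for instance {instance_uuid}")
            return False
        waiter.set()
        return True

events = InstanceEventsSketch()
events.dispatch("9df7b187-e579-41b0-9d24-be2a1ae93079",
                "network-vif-plugged-fd0c5f07-29de-4e64-a60c-655c3da4bb9e")

In this spawn-time race the warning is benign: the port update that follows in the log still refreshes the instance network info cache, and the build continues.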
[ 1111.309606] env[69992]: DEBUG nova.network.neutron [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Successfully updated port: fd0c5f07-29de-4e64-a60c-655c3da4bb9e {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1111.541635] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897301, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.712067] env[69992]: DEBUG nova.compute.utils [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1111.715967] env[69992]: DEBUG nova.compute.manager [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1111.715967] env[69992]: DEBUG nova.network.neutron [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1111.756912] env[69992]: DEBUG nova.policy [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dc6792edfe6245d2ba77a14aba041ca0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '658cab8ee4194f7f98dd07de450f248b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1111.812475] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "refresh_cache-9df7b187-e579-41b0-9d24-be2a1ae93079" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.812703] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "refresh_cache-9df7b187-e579-41b0-9d24-be2a1ae93079" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1111.812908] env[69992]: DEBUG nova.network.neutron [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1111.886449] env[69992]: DEBUG oslo_concurrency.lockutils [None 
req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "refresh_cache-0e8163d9-6ff5-4f1e-af33-ccb42fa46750" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.886449] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquired lock "refresh_cache-0e8163d9-6ff5-4f1e-af33-ccb42fa46750" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1111.886718] env[69992]: DEBUG nova.network.neutron [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1112.046821] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897301, 'name': CreateVM_Task, 'duration_secs': 1.294997} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.050617] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1112.051801] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.051801] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1112.052016] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1112.052174] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e25e656-ec6b-406b-9364-d483e7bc0690 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.058581] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Waiting for the task: (returnval){ [ 1112.058581] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]521b6f6a-b1c1-a378-d724-5adda0f92048" [ 1112.058581] env[69992]: _type = "Task" [ 1112.058581] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.076678] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521b6f6a-b1c1-a378-d724-5adda0f92048, 'name': SearchDatastore_Task, 'duration_secs': 0.010455} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.076678] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.076678] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1112.076678] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.076678] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1112.076678] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1112.076678] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f070527d-eab9-45aa-a75e-bdc40a35074a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.089232] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1112.089426] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1112.090459] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67521cef-3f75-4e9b-b370-73c966515f5f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.096270] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Waiting for the task: (returnval){ [ 1112.096270] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a54917-a413-114b-1807-0ab35a8e4a19" [ 1112.096270] env[69992]: _type = "Task" [ 1112.096270] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.104546] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a54917-a413-114b-1807-0ab35a8e4a19, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.216686] env[69992]: DEBUG nova.compute.manager [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1112.319993] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746c1293-4e22-4d8a-8c97-0fad7f2fa9dc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.328583] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2ebc7d-fb3b-4753-831d-ecd04c016451 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.360490] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8532885e-ecf7-4429-a726-0c4af4914491 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.363755] env[69992]: DEBUG nova.network.neutron [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1112.371722] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd0c2da-87b1-4669-b906-5bdd28bc50fd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.384834] env[69992]: DEBUG nova.compute.provider_tree [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1112.535456] env[69992]: DEBUG nova.network.neutron [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Successfully created port: 37beebe5-49d5-45f4-9dff-8ea169c1920f {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1112.583293] env[69992]: DEBUG nova.network.neutron [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Updating instance_info_cache with network_info: [{"id": "fd0c5f07-29de-4e64-a60c-655c3da4bb9e", "address": "fa:16:3e:8b:b7:f0", "network": {"id": "0655b1a1-e1ee-4efd-889c-0f72915310f5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1445509008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51b8195c4e7418cbdaa66aa5e5aff5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd0c5f07-29", "ovs_interfaceid": "fd0c5f07-29de-4e64-a60c-655c3da4bb9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.606365] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a54917-a413-114b-1807-0ab35a8e4a19, 'name': SearchDatastore_Task, 'duration_secs': 0.008619} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.607797] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96228955-d806-4497-ad1d-ad6fc53f3058 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.616631] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Waiting for the task: (returnval){ [ 1112.616631] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d32dd1-9203-55aa-47c5-5abf57845418" [ 1112.616631] env[69992]: _type = "Task" [ 1112.616631] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.627241] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d32dd1-9203-55aa-47c5-5abf57845418, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.696074] env[69992]: DEBUG nova.network.neutron [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Updating instance_info_cache with network_info: [{"id": "47e06987-ed7c-4f19-8716-20716e1056c3", "address": "fa:16:3e:7b:f9:7a", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47e06987-ed", "ovs_interfaceid": "47e06987-ed7c-4f19-8716-20716e1056c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.817398] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Acquiring lock "37751af7-267e-4693-aaa3-cd1bb9c3d950" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.817633] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 
tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Lock "37751af7-267e-4693-aaa3-cd1bb9c3d950" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.892029] env[69992]: DEBUG nova.scheduler.client.report [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1113.085560] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "refresh_cache-9df7b187-e579-41b0-9d24-be2a1ae93079" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1113.085820] env[69992]: DEBUG nova.compute.manager [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Instance network_info: |[{"id": "fd0c5f07-29de-4e64-a60c-655c3da4bb9e", "address": "fa:16:3e:8b:b7:f0", "network": {"id": "0655b1a1-e1ee-4efd-889c-0f72915310f5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1445509008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51b8195c4e7418cbdaa66aa5e5aff5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd0c5f07-29", "ovs_interfaceid": "fd0c5f07-29de-4e64-a60c-655c3da4bb9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1113.086268] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:b7:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'fd0c5f07-29de-4e64-a60c-655c3da4bb9e', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1113.096474] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1113.096474] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1113.096474] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dda94f22-158d-4454-b601-117866d380fc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.114026] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1113.114026] env[69992]: value = "task-2897302" [ 1113.114026] env[69992]: _type = "Task" [ 1113.114026] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.124440] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897302, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.127780] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d32dd1-9203-55aa-47c5-5abf57845418, 'name': SearchDatastore_Task, 'duration_secs': 0.010361} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.128029] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1113.128478] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 7932a42f-6a62-4c2c-be9a-3cb518fe4183/7932a42f-6a62-4c2c-be9a-3cb518fe4183.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1113.128576] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7cce322-1a5c-4968-8d46-c07f83a7a28e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.134179] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Waiting for the task: (returnval){ [ 1113.134179] env[69992]: value = "task-2897303" [ 1113.134179] env[69992]: _type = "Task" [ 1113.134179] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.144564] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Task: {'id': task-2897303, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.203032] env[69992]: DEBUG oslo_concurrency.lockutils [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Releasing lock "refresh_cache-0e8163d9-6ff5-4f1e-af33-ccb42fa46750" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1113.226063] env[69992]: DEBUG nova.compute.manager [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1113.264808] env[69992]: DEBUG nova.compute.manager [req-c6f92277-5ac1-4570-86db-10df7e626967 req-84a1d187-79d6-4f2f-9e03-84b0373eb460 service nova] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Received event network-changed-fd0c5f07-29de-4e64-a60c-655c3da4bb9e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1113.265131] env[69992]: DEBUG nova.compute.manager [req-c6f92277-5ac1-4570-86db-10df7e626967 req-84a1d187-79d6-4f2f-9e03-84b0373eb460 service nova] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Refreshing instance network info cache due to event network-changed-fd0c5f07-29de-4e64-a60c-655c3da4bb9e. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1113.265362] env[69992]: DEBUG oslo_concurrency.lockutils [req-c6f92277-5ac1-4570-86db-10df7e626967 req-84a1d187-79d6-4f2f-9e03-84b0373eb460 service nova] Acquiring lock "refresh_cache-9df7b187-e579-41b0-9d24-be2a1ae93079" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.265458] env[69992]: DEBUG oslo_concurrency.lockutils [req-c6f92277-5ac1-4570-86db-10df7e626967 req-84a1d187-79d6-4f2f-9e03-84b0373eb460 service nova] Acquired lock "refresh_cache-9df7b187-e579-41b0-9d24-be2a1ae93079" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.265623] env[69992]: DEBUG nova.network.neutron [req-c6f92277-5ac1-4570-86db-10df7e626967 req-84a1d187-79d6-4f2f-9e03-84b0373eb460 service nova] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Refreshing network info cache for port fd0c5f07-29de-4e64-a60c-655c3da4bb9e {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1113.271603] env[69992]: DEBUG nova.virt.hardware [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1113.271603] env[69992]: DEBUG nova.virt.hardware [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1113.271603] env[69992]: DEBUG nova.virt.hardware [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1113.271603] env[69992]: DEBUG nova.virt.hardware [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1113.271603] env[69992]: DEBUG nova.virt.hardware [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1113.271603] env[69992]: DEBUG nova.virt.hardware [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Chose sockets=0, cores=0, threads=0; limits 
were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1113.272299] env[69992]: DEBUG nova.virt.hardware [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1113.272299] env[69992]: DEBUG nova.virt.hardware [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1113.272299] env[69992]: DEBUG nova.virt.hardware [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1113.272392] env[69992]: DEBUG nova.virt.hardware [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1113.272587] env[69992]: DEBUG nova.virt.hardware [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1113.274734] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9141c2dd-d497-4702-8156-ddacb4fec758 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.289923] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580a8992-b176-4a40-9235-81e2eebefedb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.322598] env[69992]: DEBUG nova.compute.manager [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1113.395737] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.188s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.399662] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.217s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.399662] env[69992]: DEBUG nova.objects.instance [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lazy-loading 'resources' on Instance uuid e5d9de80-1ee5-462a-8459-168fd60e1972 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1113.429085] env[69992]: INFO nova.scheduler.client.report [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Deleted allocations for instance 06442c68-7dc6-46a1-9e35-34a62730a555 [ 1113.623861] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897302, 'name': CreateVM_Task, 'duration_secs': 0.442777} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.624125] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1113.624750] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.624911] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.625232] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1113.625492] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44e9c42d-c637-45be-94c9-b3a57232f097 {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.630922] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1113.630922] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5241660d-ba82-d603-f6e9-629e93f2a81d" [ 1113.630922] env[69992]: _type = "Task" [ 1113.630922] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.641575] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5241660d-ba82-d603-f6e9-629e93f2a81d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.646532] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Task: {'id': task-2897303, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453614} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.646648] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 7932a42f-6a62-4c2c-be9a-3cb518fe4183/7932a42f-6a62-4c2c-be9a-3cb518fe4183.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1113.646852] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1113.647097] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bf2428e1-c972-40cb-a3e5-a44363ba42c6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.653209] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Waiting for the task: (returnval){ [ 1113.653209] env[69992]: value = "task-2897304" [ 1113.653209] env[69992]: _type = "Task" [ 1113.653209] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.662181] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Task: {'id': task-2897304, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.728016] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ae57be2-f474-4599-82b0-da0a33201fa3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.754589] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a67ca38-05d6-4a9f-bd90-83119e4b2de1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.764034] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Updating instance '0e8163d9-6ff5-4f1e-af33-ccb42fa46750' progress to 83 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1113.846983] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.931460] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquiring lock "32bdb15d-6a4d-4445-9b82-d18b0f6743b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.931589] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Lock "32bdb15d-6a4d-4445-9b82-d18b0f6743b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.932088] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquiring lock "32bdb15d-6a4d-4445-9b82-d18b0f6743b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.932088] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Lock "32bdb15d-6a4d-4445-9b82-d18b0f6743b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.932216] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 
tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Lock "32bdb15d-6a4d-4445-9b82-d18b0f6743b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.938221] env[69992]: INFO nova.compute.manager [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Terminating instance [ 1113.939926] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c7508978-ba8b-4190-9176-77112468ae97 tempest-ServerMetadataNegativeTestJSON-1469643113 tempest-ServerMetadataNegativeTestJSON-1469643113-project-member] Lock "06442c68-7dc6-46a1-9e35-34a62730a555" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.452s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.984818] env[69992]: DEBUG nova.network.neutron [req-c6f92277-5ac1-4570-86db-10df7e626967 req-84a1d187-79d6-4f2f-9e03-84b0373eb460 service nova] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Updated VIF entry in instance network info cache for port fd0c5f07-29de-4e64-a60c-655c3da4bb9e. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1113.985170] env[69992]: DEBUG nova.network.neutron [req-c6f92277-5ac1-4570-86db-10df7e626967 req-84a1d187-79d6-4f2f-9e03-84b0373eb460 service nova] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Updating instance_info_cache with network_info: [{"id": "fd0c5f07-29de-4e64-a60c-655c3da4bb9e", "address": "fa:16:3e:8b:b7:f0", "network": {"id": "0655b1a1-e1ee-4efd-889c-0f72915310f5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1445509008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51b8195c4e7418cbdaa66aa5e5aff5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd0c5f07-29", "ovs_interfaceid": "fd0c5f07-29de-4e64-a60c-655c3da4bb9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.143388] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5241660d-ba82-d603-f6e9-629e93f2a81d, 'name': SearchDatastore_Task, 'duration_secs': 0.010401} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.143388] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.143623] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1114.143897] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.144096] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1114.144490] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1114.145431] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f41dde0a-449d-4713-9280-12d73335581b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.153104] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1114.153299] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1114.154085] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed717868-b2ba-4c70-ba43-73daea2d0c3e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.168019] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1114.168019] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5282288f-4425-f94d-9aa2-583ee1da25e6" [ 1114.168019] env[69992]: _type = "Task" [ 1114.168019] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.170570] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Task: {'id': task-2897304, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063982} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.173907] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1114.174248] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68345ad-89ad-4587-90d9-41f13c8e50d3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.183745] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5282288f-4425-f94d-9aa2-583ee1da25e6, 'name': SearchDatastore_Task, 'duration_secs': 0.008772} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.201021] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] 7932a42f-6a62-4c2c-be9a-3cb518fe4183/7932a42f-6a62-4c2c-be9a-3cb518fe4183.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1114.205667] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ded02311-321d-4879-8867-59510d33bcec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.209847] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2496ca9d-8731-4aee-8165-829ca9964107 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.229069] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1114.229069] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5220670a-0624-e446-f150-bd5accb21a74" [ 1114.229069] env[69992]: _type = "Task" [ 1114.229069] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.237690] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5220670a-0624-e446-f150-bd5accb21a74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.241250] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Waiting for the task: (returnval){ [ 1114.241250] env[69992]: value = "task-2897305" [ 1114.241250] env[69992]: _type = "Task" [ 1114.241250] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.249125] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Task: {'id': task-2897305, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.273218] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1114.273530] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61d4240b-1401-4be5-bb33-6ed58ee73859 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.281682] env[69992]: DEBUG oslo_vmware.api [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1114.281682] env[69992]: value = "task-2897306" [ 1114.281682] env[69992]: _type = "Task" [ 1114.281682] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.295013] env[69992]: DEBUG oslo_vmware.api [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897306, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.445018] env[69992]: DEBUG nova.compute.manager [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1114.445256] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1114.446141] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09da7fad-63b3-48f5-98df-8f8c17e2f1de {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.454406] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1114.454721] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fbf150f2-6371-4625-a862-cfd73ee86791 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.487921] env[69992]: DEBUG oslo_concurrency.lockutils [req-c6f92277-5ac1-4570-86db-10df7e626967 req-84a1d187-79d6-4f2f-9e03-84b0373eb460 service nova] Releasing lock "refresh_cache-9df7b187-e579-41b0-9d24-be2a1ae93079" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.521029] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1114.521029] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1114.521219] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Deleting the datastore file [datastore2] 32bdb15d-6a4d-4445-9b82-d18b0f6743b6 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1114.521423] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9ec5ce66-388c-42ac-b3aa-9e238b0c00b6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.528054] env[69992]: DEBUG oslo_vmware.api [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for the task: (returnval){ [ 1114.528054] env[69992]: value = "task-2897308" [ 1114.528054] env[69992]: _type = "Task" [ 1114.528054] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.532671] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-613c1fe8-d82c-4de8-8e4b-3353f0fc9497 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.537975] env[69992]: DEBUG oslo_vmware.api [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897308, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.543077] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d58b6249-3fc9-4f9a-aae8-626e6e891dfe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.575065] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6323ec5-ea99-4725-a1ad-e7561cd5dd23 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.582545] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02c992b-cf93-4b2f-8976-ea8b89519a4f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.595936] env[69992]: DEBUG nova.compute.provider_tree [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1114.720207] env[69992]: DEBUG nova.network.neutron [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Successfully updated port: 37beebe5-49d5-45f4-9dff-8ea169c1920f {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1114.742997] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5220670a-0624-e446-f150-bd5accb21a74, 'name': SearchDatastore_Task, 'duration_secs': 0.010204} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.746446] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.746777] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 9df7b187-e579-41b0-9d24-be2a1ae93079/9df7b187-e579-41b0-9d24-be2a1ae93079.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1114.747410] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cb665037-3b7b-4890-8375-24a9d5b2bcae {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.756319] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Task: {'id': task-2897305, 'name': ReconfigVM_Task, 'duration_secs': 0.396919} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.759102] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Reconfigured VM instance instance-00000039 to attach disk [datastore1] 7932a42f-6a62-4c2c-be9a-3cb518fe4183/7932a42f-6a62-4c2c-be9a-3cb518fe4183.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1114.759854] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1114.759854] env[69992]: value = "task-2897309" [ 1114.759854] env[69992]: _type = "Task" [ 1114.759854] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.760152] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ab9c19c6-d892-4475-8d41-0fa70adc7ffe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.770788] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897309, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.772009] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Waiting for the task: (returnval){ [ 1114.772009] env[69992]: value = "task-2897310" [ 1114.772009] env[69992]: _type = "Task" [ 1114.772009] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.779551] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Task: {'id': task-2897310, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.790358] env[69992]: DEBUG oslo_vmware.api [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897306, 'name': PowerOnVM_Task, 'duration_secs': 0.396896} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.790691] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1114.790920] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-87d65179-7787-48d2-b947-caf773ff5f9a tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Updating instance '0e8163d9-6ff5-4f1e-af33-ccb42fa46750' progress to 100 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1115.039807] env[69992]: DEBUG oslo_vmware.api [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897308, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168908} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.041034] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1115.041034] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1115.041034] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1115.041034] env[69992]: INFO nova.compute.manager [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1115.041318] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1115.041617] env[69992]: DEBUG nova.compute.manager [-] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1115.041813] env[69992]: DEBUG nova.network.neutron [-] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1115.103173] env[69992]: DEBUG nova.scheduler.client.report [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1115.224947] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "refresh_cache-546fb923-4574-4407-8625-69e6c4d8d35e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.225279] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquired lock "refresh_cache-546fb923-4574-4407-8625-69e6c4d8d35e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1115.225279] env[69992]: DEBUG nova.network.neutron [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1115.275687] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897309, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470813} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.278710] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 9df7b187-e579-41b0-9d24-be2a1ae93079/9df7b187-e579-41b0-9d24-be2a1ae93079.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1115.278925] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1115.279189] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b1e402d7-b5aa-4e6c-9e19-5c201159fa2c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.286015] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Task: {'id': task-2897310, 'name': Rename_Task, 'duration_secs': 0.225534} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.287247] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1115.287560] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1115.287560] env[69992]: value = "task-2897311" [ 1115.287560] env[69992]: _type = "Task" [ 1115.287560] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.287743] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56aee1b8-4dec-4a2c-a902-96bebc669cb3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.296916] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897311, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.299314] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Waiting for the task: (returnval){ [ 1115.299314] env[69992]: value = "task-2897312" [ 1115.299314] env[69992]: _type = "Task" [ 1115.299314] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.310822] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Task: {'id': task-2897312, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.399950] env[69992]: DEBUG nova.compute.manager [req-ddc4c74f-1676-4e2d-b6b8-1eb8a961d506 req-13da22c5-4241-4a8b-ab4e-c5e9efbf5ff9 service nova] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Received event network-vif-plugged-37beebe5-49d5-45f4-9dff-8ea169c1920f {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1115.400136] env[69992]: DEBUG oslo_concurrency.lockutils [req-ddc4c74f-1676-4e2d-b6b8-1eb8a961d506 req-13da22c5-4241-4a8b-ab4e-c5e9efbf5ff9 service nova] Acquiring lock "546fb923-4574-4407-8625-69e6c4d8d35e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.400433] env[69992]: DEBUG oslo_concurrency.lockutils [req-ddc4c74f-1676-4e2d-b6b8-1eb8a961d506 req-13da22c5-4241-4a8b-ab4e-c5e9efbf5ff9 service nova] Lock "546fb923-4574-4407-8625-69e6c4d8d35e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.400704] env[69992]: DEBUG oslo_concurrency.lockutils [req-ddc4c74f-1676-4e2d-b6b8-1eb8a961d506 req-13da22c5-4241-4a8b-ab4e-c5e9efbf5ff9 service nova] Lock "546fb923-4574-4407-8625-69e6c4d8d35e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.401152] env[69992]: DEBUG nova.compute.manager [req-ddc4c74f-1676-4e2d-b6b8-1eb8a961d506 req-13da22c5-4241-4a8b-ab4e-c5e9efbf5ff9 service nova] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] No waiting events found dispatching network-vif-plugged-37beebe5-49d5-45f4-9dff-8ea169c1920f {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1115.401324] env[69992]: WARNING nova.compute.manager [req-ddc4c74f-1676-4e2d-b6b8-1eb8a961d506 req-13da22c5-4241-4a8b-ab4e-c5e9efbf5ff9 service nova] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Received unexpected event network-vif-plugged-37beebe5-49d5-45f4-9dff-8ea169c1920f for instance with vm_state building and task_state spawning. [ 1115.401566] env[69992]: DEBUG nova.compute.manager [req-ddc4c74f-1676-4e2d-b6b8-1eb8a961d506 req-13da22c5-4241-4a8b-ab4e-c5e9efbf5ff9 service nova] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Received event network-changed-37beebe5-49d5-45f4-9dff-8ea169c1920f {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1115.401805] env[69992]: DEBUG nova.compute.manager [req-ddc4c74f-1676-4e2d-b6b8-1eb8a961d506 req-13da22c5-4241-4a8b-ab4e-c5e9efbf5ff9 service nova] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Refreshing instance network info cache due to event network-changed-37beebe5-49d5-45f4-9dff-8ea169c1920f. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1115.402082] env[69992]: DEBUG oslo_concurrency.lockutils [req-ddc4c74f-1676-4e2d-b6b8-1eb8a961d506 req-13da22c5-4241-4a8b-ab4e-c5e9efbf5ff9 service nova] Acquiring lock "refresh_cache-546fb923-4574-4407-8625-69e6c4d8d35e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.611334] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.213s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.615157] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 47.344s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.617038] env[69992]: INFO nova.compute.claims [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1115.653837] env[69992]: INFO nova.scheduler.client.report [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Deleted allocations for instance e5d9de80-1ee5-462a-8459-168fd60e1972 [ 1115.794327] env[69992]: DEBUG nova.network.neutron [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1115.801617] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897311, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067981} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.805028] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1115.805580] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8fa959-60e0-4d2b-9a14-0af1ac1487bb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.817455] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Task: {'id': task-2897312, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.849989] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 9df7b187-e579-41b0-9d24-be2a1ae93079/9df7b187-e579-41b0-9d24-be2a1ae93079.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1115.853601] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd7dbf2c-5db3-4ce7-ba30-4b961badc4f4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.874475] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1115.874475] env[69992]: value = "task-2897313" [ 1115.874475] env[69992]: _type = "Task" [ 1115.874475] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.883251] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897313, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.968027] env[69992]: DEBUG nova.network.neutron [-] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.054181] env[69992]: DEBUG nova.network.neutron [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Updating instance_info_cache with network_info: [{"id": "37beebe5-49d5-45f4-9dff-8ea169c1920f", "address": "fa:16:3e:cb:49:9c", "network": {"id": "bea180e9-720e-4be5-bb1d-8aa1243cfe3f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-67313604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "658cab8ee4194f7f98dd07de450f248b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37beebe5-49", "ovs_interfaceid": "37beebe5-49d5-45f4-9dff-8ea169c1920f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.164815] 
env[69992]: DEBUG oslo_concurrency.lockutils [None req-7b606a63-381d-41b3-995a-bb0196e69818 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "e5d9de80-1ee5-462a-8459-168fd60e1972" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 52.695s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.313199] env[69992]: DEBUG oslo_vmware.api [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Task: {'id': task-2897312, 'name': PowerOnVM_Task, 'duration_secs': 0.974666} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.313522] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1116.313729] env[69992]: DEBUG nova.compute.manager [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1116.314547] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc412329-e838-48dc-ac02-73a00e00df14 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.385103] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897313, 'name': ReconfigVM_Task, 'duration_secs': 0.314102} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.385423] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 9df7b187-e579-41b0-9d24-be2a1ae93079/9df7b187-e579-41b0-9d24-be2a1ae93079.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1116.386069] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d0cfcb79-ea2b-4e4a-ae9b-53cb1a55fb9a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.395024] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1116.395024] env[69992]: value = "task-2897314" [ 1116.395024] env[69992]: _type = "Task" [ 1116.395024] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.400431] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897314, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.470333] env[69992]: INFO nova.compute.manager [-] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Took 1.43 seconds to deallocate network for instance. [ 1116.556941] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Releasing lock "refresh_cache-546fb923-4574-4407-8625-69e6c4d8d35e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1116.557277] env[69992]: DEBUG nova.compute.manager [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Instance network_info: |[{"id": "37beebe5-49d5-45f4-9dff-8ea169c1920f", "address": "fa:16:3e:cb:49:9c", "network": {"id": "bea180e9-720e-4be5-bb1d-8aa1243cfe3f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-67313604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "658cab8ee4194f7f98dd07de450f248b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37beebe5-49", "ovs_interfaceid": "37beebe5-49d5-45f4-9dff-8ea169c1920f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1116.557985] env[69992]: DEBUG oslo_concurrency.lockutils [req-ddc4c74f-1676-4e2d-b6b8-1eb8a961d506 req-13da22c5-4241-4a8b-ab4e-c5e9efbf5ff9 service nova] Acquired lock "refresh_cache-546fb923-4574-4407-8625-69e6c4d8d35e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1116.557985] env[69992]: DEBUG nova.network.neutron [req-ddc4c74f-1676-4e2d-b6b8-1eb8a961d506 req-13da22c5-4241-4a8b-ab4e-c5e9efbf5ff9 service nova] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Refreshing network info cache for port 37beebe5-49d5-45f4-9dff-8ea169c1920f {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1116.558970] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:49:9c', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': '4406a73e-2189-46ac-9e96-4f0af80b5094', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '37beebe5-49d5-45f4-9dff-8ea169c1920f', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1116.567014] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1116.568147] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1116.568229] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9516141f-b8f0-4bb0-8fc8-9393ed138be4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.589127] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1116.589127] env[69992]: value = "task-2897315" [ 1116.589127] env[69992]: _type = "Task" [ 1116.589127] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.597135] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897315, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.846811] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.905652] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897314, 'name': Rename_Task, 'duration_secs': 0.138289} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.906128] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1116.906489] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aef38dc0-a048-4a0c-9c5a-4b7f255ccccc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.917117] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1116.917117] env[69992]: value = "task-2897316" [ 1116.917117] env[69992]: _type = "Task" [ 1116.917117] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.922258] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897316, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.977768] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.101301] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897315, 'name': CreateVM_Task, 'duration_secs': 0.421296} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.101480] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1117.102638] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.102638] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.102759] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1117.103366] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e0f300c-0cb6-4ece-bd38-214a81e49055 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.112655] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1117.112655] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52be0d83-d641-d313-3d88-55a6abd22617" [ 1117.112655] env[69992]: _type = "Task" [ 1117.112655] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.121296] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52be0d83-d641-d313-3d88-55a6abd22617, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.190759] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb756339-06d5-489a-b6b9-cefd6497d478 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.200461] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f678d9c5-c165-4cb9-9c31-ad7acf986fd9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.239808] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3a7e77-da61-4e6a-81b5-1f1ecaed1651 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.247955] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b228be37-59de-4986-afc0-f7467a26437c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.263160] env[69992]: DEBUG nova.compute.provider_tree [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1117.282428] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Acquiring lock "7932a42f-6a62-4c2c-be9a-3cb518fe4183" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.282723] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Lock "7932a42f-6a62-4c2c-be9a-3cb518fe4183" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.282939] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Acquiring lock "7932a42f-6a62-4c2c-be9a-3cb518fe4183-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.283185] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] 
Lock "7932a42f-6a62-4c2c-be9a-3cb518fe4183-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.283372] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Lock "7932a42f-6a62-4c2c-be9a-3cb518fe4183-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.285672] env[69992]: INFO nova.compute.manager [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Terminating instance [ 1117.424421] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897316, 'name': PowerOnVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.509838] env[69992]: DEBUG nova.network.neutron [req-ddc4c74f-1676-4e2d-b6b8-1eb8a961d506 req-13da22c5-4241-4a8b-ab4e-c5e9efbf5ff9 service nova] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Updated VIF entry in instance network info cache for port 37beebe5-49d5-45f4-9dff-8ea169c1920f. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1117.510231] env[69992]: DEBUG nova.network.neutron [req-ddc4c74f-1676-4e2d-b6b8-1eb8a961d506 req-13da22c5-4241-4a8b-ab4e-c5e9efbf5ff9 service nova] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Updating instance_info_cache with network_info: [{"id": "37beebe5-49d5-45f4-9dff-8ea169c1920f", "address": "fa:16:3e:cb:49:9c", "network": {"id": "bea180e9-720e-4be5-bb1d-8aa1243cfe3f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-67313604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "658cab8ee4194f7f98dd07de450f248b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37beebe5-49", "ovs_interfaceid": "37beebe5-49d5-45f4-9dff-8ea169c1920f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.535792] env[69992]: DEBUG nova.compute.manager [req-c79b7518-637b-4f7a-813f-aee9edddbfe1 req-a1cbf40e-c684-4fc8-a3ae-49cfd7f5ca7d service nova] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Received event network-vif-deleted-f0ba32e1-39c0-4939-aafa-23959e961f93 {{(pid=69992) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1117.622852] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52be0d83-d641-d313-3d88-55a6abd22617, 'name': SearchDatastore_Task, 'duration_secs': 0.011025} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.623160] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1117.623398] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1117.623668] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.623831] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.624025] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1117.624281] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c29f03c3-6ee0-4946-9ee5-272e2d2cb4c1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.632613] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1117.632836] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1117.633508] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ca1a42a-4c90-45ba-8729-6ced81cf565d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.638653] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1117.638653] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52bb1232-d0bf-d53d-3d31-e08536249f05" [ 1117.638653] env[69992]: _type = "Task" [ 1117.638653] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.647714] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52bb1232-d0bf-d53d-3d31-e08536249f05, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.766576] env[69992]: DEBUG nova.scheduler.client.report [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1117.793706] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Acquiring lock "refresh_cache-7932a42f-6a62-4c2c-be9a-3cb518fe4183" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.793933] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Acquired lock "refresh_cache-7932a42f-6a62-4c2c-be9a-3cb518fe4183" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.794129] env[69992]: DEBUG nova.network.neutron [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1117.925603] env[69992]: DEBUG oslo_vmware.api [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897316, 'name': PowerOnVM_Task, 'duration_secs': 0.544947} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.925893] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1117.926099] env[69992]: INFO nova.compute.manager [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Took 7.37 seconds to spawn the instance on the hypervisor. [ 1117.926287] env[69992]: DEBUG nova.compute.manager [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1117.927083] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c572744a-c9c2-46b3-a6af-c0e6c4c17e73 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.972164] env[69992]: DEBUG nova.network.neutron [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Port 47e06987-ed7c-4f19-8716-20716e1056c3 binding to destination host cpu-1 is already ACTIVE {{(pid=69992) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1117.972453] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "refresh_cache-0e8163d9-6ff5-4f1e-af33-ccb42fa46750" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.972608] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquired lock "refresh_cache-0e8163d9-6ff5-4f1e-af33-ccb42fa46750" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.972778] env[69992]: DEBUG nova.network.neutron [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1118.013585] env[69992]: DEBUG oslo_concurrency.lockutils [req-ddc4c74f-1676-4e2d-b6b8-1eb8a961d506 req-13da22c5-4241-4a8b-ab4e-c5e9efbf5ff9 service nova] Releasing lock "refresh_cache-546fb923-4574-4407-8625-69e6c4d8d35e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.150818] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52bb1232-d0bf-d53d-3d31-e08536249f05, 'name': 
SearchDatastore_Task, 'duration_secs': 0.00899} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.151702] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-316ff122-bde6-4485-b084-b4d5739f05fb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.157042] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1118.157042] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52dd81bb-6e7c-872d-d616-2967f7a7b5ed" [ 1118.157042] env[69992]: _type = "Task" [ 1118.157042] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.164836] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52dd81bb-6e7c-872d-d616-2967f7a7b5ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.274467] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.658s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1118.274467] env[69992]: DEBUG nova.compute.manager [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1118.278375] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.907s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1118.279778] env[69992]: INFO nova.compute.claims [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1118.324017] env[69992]: DEBUG nova.network.neutron [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1118.410198] env[69992]: DEBUG nova.network.neutron [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.447014] env[69992]: INFO nova.compute.manager [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Took 57.20 seconds to build instance. [ 1118.669675] env[69992]: DEBUG oslo_concurrency.lockutils [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1118.669937] env[69992]: DEBUG oslo_concurrency.lockutils [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1118.675051] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52dd81bb-6e7c-872d-d616-2967f7a7b5ed, 'name': SearchDatastore_Task, 'duration_secs': 0.010523} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.675522] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.675782] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 546fb923-4574-4407-8625-69e6c4d8d35e/546fb923-4574-4407-8625-69e6c4d8d35e.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1118.676040] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f3c70b26-1f89-41de-8b41-e03634218a5c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.683775] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1118.683775] env[69992]: value = "task-2897317" [ 1118.683775] env[69992]: _type = "Task" [ 1118.683775] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.691607] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897317, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.784501] env[69992]: DEBUG nova.compute.utils [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1118.791794] env[69992]: DEBUG nova.compute.manager [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1118.792083] env[69992]: DEBUG nova.network.neutron [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1118.834081] env[69992]: DEBUG nova.network.neutron [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Updating instance_info_cache with network_info: [{"id": "47e06987-ed7c-4f19-8716-20716e1056c3", "address": "fa:16:3e:7b:f9:7a", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47e06987-ed", "ovs_interfaceid": "47e06987-ed7c-4f19-8716-20716e1056c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.838423] env[69992]: DEBUG nova.policy [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '22958db7f0e94b1887f1cebfef8d1f3e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0bb19b95496548c084be8a8c87b8cd94', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1118.915109] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Releasing lock "refresh_cache-7932a42f-6a62-4c2c-be9a-3cb518fe4183" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.915109] env[69992]: DEBUG nova.compute.manager [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1118.915109] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1118.916305] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae35181c-8260-4638-9bf2-f6ade70e0049 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.926538] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1118.926538] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a8a3ba38-1d19-49aa-9366-ca72fd52ff0a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.934961] env[69992]: DEBUG oslo_vmware.api [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for the task: (returnval){ [ 1118.934961] env[69992]: value = "task-2897318" [ 1118.934961] env[69992]: _type = "Task" [ 1118.934961] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.944200] env[69992]: DEBUG oslo_vmware.api [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897318, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.949384] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7999182-9c88-4a08-856c-2ba23e715c68 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "9df7b187-e579-41b0-9d24-be2a1ae93079" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.531s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.176759] env[69992]: DEBUG nova.compute.manager [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1119.194038] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897317, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469684} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.194316] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 546fb923-4574-4407-8625-69e6c4d8d35e/546fb923-4574-4407-8625-69e6c4d8d35e.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1119.194539] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1119.194793] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6c91f74f-a5e9-4c29-8d0f-39e4394e68b8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.201490] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1119.201490] env[69992]: value = "task-2897319" [ 1119.201490] env[69992]: _type = "Task" [ 1119.201490] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.210274] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897319, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.220864] env[69992]: DEBUG nova.network.neutron [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Successfully created port: 4e3de664-c228-44f8-84f3-2c40c903246e {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1119.292699] env[69992]: DEBUG nova.compute.manager [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1119.339412] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Releasing lock "refresh_cache-0e8163d9-6ff5-4f1e-af33-ccb42fa46750" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1119.449260] env[69992]: DEBUG oslo_vmware.api [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897318, 'name': PowerOffVM_Task, 'duration_secs': 0.151821} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.451942] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1119.452146] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1119.453444] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b47efce3-370c-434e-92ff-e1698cb80544 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.479868] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1119.480111] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1119.480569] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Deleting the datastore file [datastore1] 7932a42f-6a62-4c2c-be9a-3cb518fe4183 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1119.480654] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b72bab9-0b9a-4884-bc14-3d8410d19e8e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.486709] env[69992]: DEBUG oslo_vmware.api [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for the task: (returnval){ [ 1119.486709] env[69992]: value = "task-2897321" [ 1119.486709] env[69992]: _type = "Task" [ 1119.486709] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.496846] env[69992]: DEBUG oslo_vmware.api [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897321, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.698175] env[69992]: DEBUG oslo_concurrency.lockutils [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1119.713089] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897319, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072342} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.713351] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1119.714174] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235b12af-b0f2-4fa6-bfb2-6cb360b4a0c0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.735810] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 546fb923-4574-4407-8625-69e6c4d8d35e/546fb923-4574-4407-8625-69e6c4d8d35e.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1119.738509] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bfd883c4-6e1f-4faa-9680-af7f3dffa088 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.758850] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1119.758850] env[69992]: value = "task-2897322" [ 1119.758850] env[69992]: _type = "Task" [ 1119.758850] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.774518] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897322, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.826993] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-046ea9c7-d7bf-4bb3-9806-35947f90b25f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.834860] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf5b8a0-08e3-41ef-875a-b961d0d204b2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.842077] env[69992]: DEBUG nova.compute.manager [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=69992) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1119.842301] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1119.867579] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d59ba65-4821-461a-9db3-03190e7b7317 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.875277] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8160bf9c-2ac9-423f-9d6f-19349c6d59a8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.889251] env[69992]: DEBUG nova.compute.provider_tree [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1119.996343] env[69992]: DEBUG oslo_vmware.api [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Task: {'id': task-2897321, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.343005} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.996603] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1119.996787] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1119.997076] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1119.997154] env[69992]: INFO nova.compute.manager [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1119.997381] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1119.997568] env[69992]: DEBUG nova.compute.manager [-] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1119.997662] env[69992]: DEBUG nova.network.neutron [-] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1120.015059] env[69992]: DEBUG nova.network.neutron [-] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1120.269804] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897322, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.285699] env[69992]: DEBUG nova.compute.manager [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Stashing vm_state: active {{(pid=69992) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1120.304226] env[69992]: DEBUG nova.compute.manager [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1120.336657] env[69992]: DEBUG nova.virt.hardware [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1120.337128] env[69992]: DEBUG nova.virt.hardware [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1120.337404] env[69992]: DEBUG nova.virt.hardware [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1120.337728] env[69992]: DEBUG nova.virt.hardware [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1120.337997] env[69992]: DEBUG nova.virt.hardware [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1120.338298] env[69992]: DEBUG nova.virt.hardware [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1120.338667] env[69992]: DEBUG 
nova.virt.hardware [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1120.339387] env[69992]: DEBUG nova.virt.hardware [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1120.339387] env[69992]: DEBUG nova.virt.hardware [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1120.339628] env[69992]: DEBUG nova.virt.hardware [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1120.341305] env[69992]: DEBUG nova.virt.hardware [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1120.342533] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59badbb-b58d-49a3-b156-45d7d6091d12 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.358532] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c5ef037-df4d-4981-bfba-1447df837a63 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.392383] env[69992]: DEBUG nova.scheduler.client.report [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1120.517077] env[69992]: DEBUG nova.network.neutron [-] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1120.607149] env[69992]: DEBUG nova.compute.manager [req-245defa9-914b-4ace-830f-21ac7568f254 req-bd28eb67-5ca6-4a11-8d92-909b1f5c0fe8 service nova] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Received event 
network-vif-plugged-4e3de664-c228-44f8-84f3-2c40c903246e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1120.607596] env[69992]: DEBUG oslo_concurrency.lockutils [req-245defa9-914b-4ace-830f-21ac7568f254 req-bd28eb67-5ca6-4a11-8d92-909b1f5c0fe8 service nova] Acquiring lock "f64108ec-c3b2-4b11-9085-2c56b0de93f5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.607827] env[69992]: DEBUG oslo_concurrency.lockutils [req-245defa9-914b-4ace-830f-21ac7568f254 req-bd28eb67-5ca6-4a11-8d92-909b1f5c0fe8 service nova] Lock "f64108ec-c3b2-4b11-9085-2c56b0de93f5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.607999] env[69992]: DEBUG oslo_concurrency.lockutils [req-245defa9-914b-4ace-830f-21ac7568f254 req-bd28eb67-5ca6-4a11-8d92-909b1f5c0fe8 service nova] Lock "f64108ec-c3b2-4b11-9085-2c56b0de93f5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.608190] env[69992]: DEBUG nova.compute.manager [req-245defa9-914b-4ace-830f-21ac7568f254 req-bd28eb67-5ca6-4a11-8d92-909b1f5c0fe8 service nova] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] No waiting events found dispatching network-vif-plugged-4e3de664-c228-44f8-84f3-2c40c903246e {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1120.608366] env[69992]: WARNING nova.compute.manager [req-245defa9-914b-4ace-830f-21ac7568f254 req-bd28eb67-5ca6-4a11-8d92-909b1f5c0fe8 service nova] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Received unexpected event network-vif-plugged-4e3de664-c228-44f8-84f3-2c40c903246e for instance with vm_state building and task_state spawning. [ 1120.709400] env[69992]: DEBUG nova.network.neutron [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Successfully updated port: 4e3de664-c228-44f8-84f3-2c40c903246e {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1120.770072] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897322, 'name': ReconfigVM_Task, 'duration_secs': 0.66199} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.770370] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 546fb923-4574-4407-8625-69e6c4d8d35e/546fb923-4574-4407-8625-69e6c4d8d35e.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1120.770988] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ead21d1c-0d63-411e-bd92-c2e996950f77 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.778374] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1120.778374] env[69992]: value = "task-2897323" [ 1120.778374] env[69992]: _type = "Task" [ 1120.778374] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.787249] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897323, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.806481] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.898031] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.619s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.898031] env[69992]: DEBUG nova.compute.manager [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1120.904414] env[69992]: DEBUG oslo_concurrency.lockutils [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.665s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.907034] env[69992]: INFO nova.compute.claims [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1121.019602] env[69992]: INFO nova.compute.manager [-] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Took 1.02 seconds to deallocate network for instance. [ 1121.210706] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquiring lock "refresh_cache-f64108ec-c3b2-4b11-9085-2c56b0de93f5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.210706] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquired lock "refresh_cache-f64108ec-c3b2-4b11-9085-2c56b0de93f5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1121.210897] env[69992]: DEBUG nova.network.neutron [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1121.288177] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897323, 'name': Rename_Task, 'duration_secs': 0.181086} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.288478] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1121.288722] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8bbc58d5-808c-4ee6-8725-42026f0acbd4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.294341] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1121.294341] env[69992]: value = "task-2897324" [ 1121.294341] env[69992]: _type = "Task" [ 1121.294341] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.301668] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897324, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.412180] env[69992]: DEBUG nova.compute.utils [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1121.415540] env[69992]: DEBUG nova.compute.manager [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1121.415655] env[69992]: DEBUG nova.network.neutron [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1121.456163] env[69992]: DEBUG nova.policy [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '57d2ee1abedf4874bcb44b4076199da6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b8716c4b7324052a3472734c655655a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1121.526280] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1121.732091] env[69992]: DEBUG nova.network.neutron [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Successfully created port: c83362f2-db37-45fa-9d49-76899f0edc31 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1121.756044] env[69992]: DEBUG nova.network.neutron [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1121.806790] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897324, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.916237] env[69992]: DEBUG nova.compute.manager [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1122.035689] env[69992]: DEBUG nova.network.neutron [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Updating instance_info_cache with network_info: [{"id": "4e3de664-c228-44f8-84f3-2c40c903246e", "address": "fa:16:3e:b0:84:44", "network": {"id": "daf09f8e-2217-4777-9a09-57c09080946c", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-520713710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bb19b95496548c084be8a8c87b8cd94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e3de664-c2", "ovs_interfaceid": "4e3de664-c228-44f8-84f3-2c40c903246e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.308831] env[69992]: DEBUG oslo_vmware.api [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897324, 'name': PowerOnVM_Task, 'duration_secs': 0.812562} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.312364] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1122.312679] env[69992]: INFO nova.compute.manager [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Took 9.09 seconds to spawn the instance on the hypervisor. 
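The oslo.vmware task flow recorded in the surrounding entries (Invoking VirtualMachine.PowerOnVM_Task, repeated "progress is N%" polls, then "completed successfully" and "Powered on the VM") follows the library's invoke_api/wait_for_task pattern. A minimal sketch, assuming an already-established oslo.vmware VMwareAPISession and a VM ManagedObjectReference (both placeholders here, not values taken from this log):

    from oslo_vmware import api

    def power_on_and_wait(session, vm_ref):
        # Start the vSphere task; this corresponds to the
        # "Invoking VirtualMachine.PowerOnVM_Task" entries above.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task at the session's poll interval,
        # which is what emits the "progress is N%" lines, and returns the
        # task info on success (raising if the task errors or is cancelled).
        return session.wait_for_task(task)

    # Session construction sketched with placeholder host/credentials; the
    # positional arguments are host, username, password, api_retry_count,
    # task_poll_interval.
    # session = api.VMwareAPISession('vc.example.test', 'user', 'secret', 10, 0.5)

The same wait loop backs the other tasks polled in this section (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task).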
[ 1122.312938] env[69992]: DEBUG nova.compute.manager [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1122.314676] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e753cab2-bfa3-4301-b3ca-90f0cf99c0fd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.429759] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9387cb4-9fcd-493c-b329-d8b33799b016 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.437154] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60dd6baa-4ce3-489f-976a-9bdcfd278036 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.476119] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4017b41-f272-4a90-bc56-f186ff0c7dc8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.485287] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43670842-ce87-4dea-a9e4-4247254c5c9e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.496943] env[69992]: DEBUG nova.compute.provider_tree [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1122.538192] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Releasing lock "refresh_cache-f64108ec-c3b2-4b11-9085-2c56b0de93f5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1122.538511] env[69992]: DEBUG nova.compute.manager [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Instance network_info: |[{"id": "4e3de664-c228-44f8-84f3-2c40c903246e", "address": "fa:16:3e:b0:84:44", "network": {"id": "daf09f8e-2217-4777-9a09-57c09080946c", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-520713710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bb19b95496548c084be8a8c87b8cd94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e3de664-c2", "ovs_interfaceid": "4e3de664-c228-44f8-84f3-2c40c903246e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1122.538865] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:84:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '777870ab-362f-4a17-9c1c-8d9cc26cd4ce', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4e3de664-c228-44f8-84f3-2c40c903246e', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1122.546602] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1122.547195] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1122.547441] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eef11958-9664-460b-91a8-700c9eaadaf4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.568462] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1122.568462] env[69992]: value = "task-2897325" [ 1122.568462] env[69992]: _type = "Task" [ 1122.568462] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.575719] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897325, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.740100] env[69992]: DEBUG nova.compute.manager [req-b87a3f30-2936-4c5c-b228-317f659c8fba req-91c53488-1ce7-4598-91c3-dbec57635810 service nova] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Received event network-changed-4e3de664-c228-44f8-84f3-2c40c903246e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1122.740315] env[69992]: DEBUG nova.compute.manager [req-b87a3f30-2936-4c5c-b228-317f659c8fba req-91c53488-1ce7-4598-91c3-dbec57635810 service nova] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Refreshing instance network info cache due to event network-changed-4e3de664-c228-44f8-84f3-2c40c903246e. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1122.741813] env[69992]: DEBUG oslo_concurrency.lockutils [req-b87a3f30-2936-4c5c-b228-317f659c8fba req-91c53488-1ce7-4598-91c3-dbec57635810 service nova] Acquiring lock "refresh_cache-f64108ec-c3b2-4b11-9085-2c56b0de93f5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.741813] env[69992]: DEBUG oslo_concurrency.lockutils [req-b87a3f30-2936-4c5c-b228-317f659c8fba req-91c53488-1ce7-4598-91c3-dbec57635810 service nova] Acquired lock "refresh_cache-f64108ec-c3b2-4b11-9085-2c56b0de93f5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1122.741813] env[69992]: DEBUG nova.network.neutron [req-b87a3f30-2936-4c5c-b228-317f659c8fba req-91c53488-1ce7-4598-91c3-dbec57635810 service nova] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Refreshing network info cache for port 4e3de664-c228-44f8-84f3-2c40c903246e {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1122.839707] env[69992]: INFO nova.compute.manager [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Took 57.51 seconds to build instance. [ 1122.929245] env[69992]: DEBUG nova.compute.manager [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1122.954024] env[69992]: DEBUG nova.virt.hardware [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1122.954024] env[69992]: DEBUG nova.virt.hardware [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1122.954251] env[69992]: DEBUG nova.virt.hardware [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1122.955223] env[69992]: DEBUG nova.virt.hardware [None req-44137f84-db80-46b8-b64b-be62c816a571 
tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1122.955223] env[69992]: DEBUG nova.virt.hardware [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1122.955223] env[69992]: DEBUG nova.virt.hardware [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1122.955223] env[69992]: DEBUG nova.virt.hardware [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1122.955223] env[69992]: DEBUG nova.virt.hardware [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1122.955223] env[69992]: DEBUG nova.virt.hardware [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1122.955498] env[69992]: DEBUG nova.virt.hardware [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1122.955498] env[69992]: DEBUG nova.virt.hardware [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1122.956383] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9719e14f-fc35-40fe-835a-6dd5258b4ab9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.964084] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adfd737d-0311-4c13-966b-4ab073dc906f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.004083] env[69992]: DEBUG nova.scheduler.client.report [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1123.078978] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897325, 'name': CreateVM_Task, 'duration_secs': 0.335069} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.079260] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1123.080017] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.080114] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1123.080453] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1123.080720] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4233431-1fc8-46ca-84de-0bfbfc90f6f4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.085819] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for the task: (returnval){ [ 1123.085819] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5243578b-faaf-722f-c8ca-886d268fd567" [ 1123.085819] env[69992]: _type = "Task" [ 1123.085819] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.094981] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5243578b-faaf-722f-c8ca-886d268fd567, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.237915] env[69992]: DEBUG nova.network.neutron [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Successfully updated port: c83362f2-db37-45fa-9d49-76899f0edc31 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1123.341571] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5a5b0115-74d5-4a46-ac1f-c81b7ded5545 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "546fb923-4574-4407-8625-69e6c4d8d35e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.555s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.445868] env[69992]: DEBUG nova.network.neutron [req-b87a3f30-2936-4c5c-b228-317f659c8fba req-91c53488-1ce7-4598-91c3-dbec57635810 service nova] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Updated VIF entry in instance network info cache for port 4e3de664-c228-44f8-84f3-2c40c903246e. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1123.447112] env[69992]: DEBUG nova.network.neutron [req-b87a3f30-2936-4c5c-b228-317f659c8fba req-91c53488-1ce7-4598-91c3-dbec57635810 service nova] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Updating instance_info_cache with network_info: [{"id": "4e3de664-c228-44f8-84f3-2c40c903246e", "address": "fa:16:3e:b0:84:44", "network": {"id": "daf09f8e-2217-4777-9a09-57c09080946c", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-520713710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bb19b95496548c084be8a8c87b8cd94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e3de664-c2", "ovs_interfaceid": "4e3de664-c228-44f8-84f3-2c40c903246e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.510861] env[69992]: DEBUG oslo_concurrency.lockutils [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.607s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.511441] env[69992]: DEBUG nova.compute.manager [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Start 
building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1123.513929] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 46.657s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.514159] env[69992]: DEBUG nova.objects.instance [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Lazy-loading 'resources' on Instance uuid efa06ccc-be20-4d0e-938f-01c91ef4de8e {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1123.596262] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5243578b-faaf-722f-c8ca-886d268fd567, 'name': SearchDatastore_Task, 'duration_secs': 0.009731} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.596546] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1123.596782] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1123.597024] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.597178] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1123.597353] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1123.597612] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-a8395903-6ff4-4df8-acfa-24a4749fd188 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.606178] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1123.606356] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1123.607136] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-598d6858-afa2-48a5-ac3a-7af2c737e5be {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.612240] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for the task: (returnval){ [ 1123.612240] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]524645c2-4da6-dd2f-60c8-e8416f052555" [ 1123.612240] env[69992]: _type = "Task" [ 1123.612240] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.619810] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524645c2-4da6-dd2f-60c8-e8416f052555, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.739827] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "refresh_cache-f2ac32d7-d32b-497a-a262-ab1cd95f87d0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.739995] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired lock "refresh_cache-f2ac32d7-d32b-497a-a262-ab1cd95f87d0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1123.740181] env[69992]: DEBUG nova.network.neutron [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1123.948576] env[69992]: DEBUG oslo_concurrency.lockutils [req-b87a3f30-2936-4c5c-b228-317f659c8fba req-91c53488-1ce7-4598-91c3-dbec57635810 service nova] Releasing lock "refresh_cache-f64108ec-c3b2-4b11-9085-2c56b0de93f5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1124.017656] env[69992]: DEBUG nova.compute.utils [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1124.022056] env[69992]: DEBUG nova.compute.manager [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Not allocating networking since 'none' was specified. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1124.124847] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524645c2-4da6-dd2f-60c8-e8416f052555, 'name': SearchDatastore_Task, 'duration_secs': 0.008186} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.125639] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47368490-fa1d-4bca-b909-36aa77b768fc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.133941] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for the task: (returnval){ [ 1124.133941] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52df5587-60c9-931d-8b57-a9b23f81c642" [ 1124.133941] env[69992]: _type = "Task" [ 1124.133941] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.141237] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52df5587-60c9-931d-8b57-a9b23f81c642, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.273411] env[69992]: DEBUG nova.network.neutron [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1124.399332] env[69992]: DEBUG nova.compute.manager [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1124.400306] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580b9e3a-006f-4df0-ad7c-759c998b4126 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.478609] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82463a87-697c-49d7-9080-da3e8be02b11 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.486532] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf2cc47-fbbf-499f-9363-830b78bfb388 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.519236] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d25e580-7ac5-4b44-96ca-58debb051784 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.522575] env[69992]: DEBUG nova.compute.manager [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1124.532850] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-355804ab-a42e-4bb1-9eff-68e4af87e94a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.547078] env[69992]: DEBUG nova.compute.provider_tree [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1124.630329] env[69992]: DEBUG nova.network.neutron [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Updating instance_info_cache with network_info: [{"id": "c83362f2-db37-45fa-9d49-76899f0edc31", "address": "fa:16:3e:44:f7:70", "network": {"id": "ef950ccf-7246-4fba-b1d2-5829e51d7f7d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093076994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b8716c4b7324052a3472734c655655a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc83362f2-db", "ovs_interfaceid": "c83362f2-db37-45fa-9d49-76899f0edc31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.644289] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52df5587-60c9-931d-8b57-a9b23f81c642, 'name': SearchDatastore_Task, 'duration_secs': 0.009729} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.645145] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1124.645416] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] f64108ec-c3b2-4b11-9085-2c56b0de93f5/f64108ec-c3b2-4b11-9085-2c56b0de93f5.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1124.645676] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-02fef77f-4143-4cfc-9bb6-f606c70b19bd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.652658] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for the task: (returnval){ [ 1124.652658] env[69992]: value = "task-2897326" [ 1124.652658] env[69992]: _type = "Task" [ 1124.652658] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.660523] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897326, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.814329] env[69992]: DEBUG nova.compute.manager [req-20865b3b-fd36-4385-84d9-520124c36bba req-789cbe8d-312f-485f-82c9-4d17f087ea88 service nova] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Received event network-vif-plugged-c83362f2-db37-45fa-9d49-76899f0edc31 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1124.814566] env[69992]: DEBUG oslo_concurrency.lockutils [req-20865b3b-fd36-4385-84d9-520124c36bba req-789cbe8d-312f-485f-82c9-4d17f087ea88 service nova] Acquiring lock "f2ac32d7-d32b-497a-a262-ab1cd95f87d0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.814807] env[69992]: DEBUG oslo_concurrency.lockutils [req-20865b3b-fd36-4385-84d9-520124c36bba req-789cbe8d-312f-485f-82c9-4d17f087ea88 service nova] Lock "f2ac32d7-d32b-497a-a262-ab1cd95f87d0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.814944] env[69992]: DEBUG oslo_concurrency.lockutils [req-20865b3b-fd36-4385-84d9-520124c36bba req-789cbe8d-312f-485f-82c9-4d17f087ea88 service nova] Lock "f2ac32d7-d32b-497a-a262-ab1cd95f87d0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1124.815132] env[69992]: DEBUG nova.compute.manager [req-20865b3b-fd36-4385-84d9-520124c36bba req-789cbe8d-312f-485f-82c9-4d17f087ea88 service nova] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] No waiting events found dispatching network-vif-plugged-c83362f2-db37-45fa-9d49-76899f0edc31 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1124.815742] env[69992]: WARNING nova.compute.manager [req-20865b3b-fd36-4385-84d9-520124c36bba req-789cbe8d-312f-485f-82c9-4d17f087ea88 service nova] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Received unexpected event network-vif-plugged-c83362f2-db37-45fa-9d49-76899f0edc31 for instance with vm_state building and task_state spawning. [ 1124.815742] env[69992]: DEBUG nova.compute.manager [req-20865b3b-fd36-4385-84d9-520124c36bba req-789cbe8d-312f-485f-82c9-4d17f087ea88 service nova] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Received event network-changed-c83362f2-db37-45fa-9d49-76899f0edc31 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1124.815742] env[69992]: DEBUG nova.compute.manager [req-20865b3b-fd36-4385-84d9-520124c36bba req-789cbe8d-312f-485f-82c9-4d17f087ea88 service nova] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Refreshing instance network info cache due to event network-changed-c83362f2-db37-45fa-9d49-76899f0edc31. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1124.815742] env[69992]: DEBUG oslo_concurrency.lockutils [req-20865b3b-fd36-4385-84d9-520124c36bba req-789cbe8d-312f-485f-82c9-4d17f087ea88 service nova] Acquiring lock "refresh_cache-f2ac32d7-d32b-497a-a262-ab1cd95f87d0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1124.914955] env[69992]: INFO nova.compute.manager [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] instance snapshotting [ 1124.918178] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3346311a-d008-42e2-83ae-35d55908af16 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.938540] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44380244-1552-4a7f-9912-a46ee865ca14 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.051087] env[69992]: DEBUG nova.scheduler.client.report [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1125.134053] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Releasing lock "refresh_cache-f2ac32d7-d32b-497a-a262-ab1cd95f87d0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1125.134053] env[69992]: DEBUG nova.compute.manager [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Instance network_info: |[{"id": "c83362f2-db37-45fa-9d49-76899f0edc31", "address": "fa:16:3e:44:f7:70", "network": {"id": "ef950ccf-7246-4fba-b1d2-5829e51d7f7d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093076994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b8716c4b7324052a3472734c655655a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tapc83362f2-db", "ovs_interfaceid": "c83362f2-db37-45fa-9d49-76899f0edc31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1125.134611] env[69992]: DEBUG oslo_concurrency.lockutils [req-20865b3b-fd36-4385-84d9-520124c36bba req-789cbe8d-312f-485f-82c9-4d17f087ea88 service nova] Acquired lock "refresh_cache-f2ac32d7-d32b-497a-a262-ab1cd95f87d0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1125.134799] env[69992]: DEBUG nova.network.neutron [req-20865b3b-fd36-4385-84d9-520124c36bba req-789cbe8d-312f-485f-82c9-4d17f087ea88 service nova] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Refreshing network info cache for port c83362f2-db37-45fa-9d49-76899f0edc31 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1125.136448] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:f7:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ed4797-90ad-44cd-bbcb-e90b2a8400f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c83362f2-db37-45fa-9d49-76899f0edc31', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1125.143920] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1125.146796] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1125.147283] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-62b2e8db-2a15-420c-aac7-d932cf45a007 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.173892] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897326, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.175238] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1125.175238] env[69992]: value = "task-2897327" [ 1125.175238] env[69992]: _type = "Task" [ 1125.175238] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.183462] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897327, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.379106] env[69992]: DEBUG nova.network.neutron [req-20865b3b-fd36-4385-84d9-520124c36bba req-789cbe8d-312f-485f-82c9-4d17f087ea88 service nova] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Updated VIF entry in instance network info cache for port c83362f2-db37-45fa-9d49-76899f0edc31. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1125.379679] env[69992]: DEBUG nova.network.neutron [req-20865b3b-fd36-4385-84d9-520124c36bba req-789cbe8d-312f-485f-82c9-4d17f087ea88 service nova] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Updating instance_info_cache with network_info: [{"id": "c83362f2-db37-45fa-9d49-76899f0edc31", "address": "fa:16:3e:44:f7:70", "network": {"id": "ef950ccf-7246-4fba-b1d2-5829e51d7f7d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093076994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b8716c4b7324052a3472734c655655a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc83362f2-db", "ovs_interfaceid": "c83362f2-db37-45fa-9d49-76899f0edc31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1125.449881] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Creating Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1125.450214] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-39e1f867-5c13-485d-86d4-add87b1d2cba {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.457944] env[69992]: DEBUG oslo_vmware.api [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1125.457944] env[69992]: value = "task-2897328" [ 1125.457944] env[69992]: _type = "Task" [ 1125.457944] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.465954] env[69992]: DEBUG oslo_vmware.api [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897328, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.533040] env[69992]: DEBUG nova.compute.manager [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1125.556229] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.042s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.561075] env[69992]: DEBUG nova.virt.hardware [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1125.561366] env[69992]: DEBUG nova.virt.hardware [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1125.561498] env[69992]: DEBUG nova.virt.hardware [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1125.561681] env[69992]: DEBUG nova.virt.hardware [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1125.561829] env[69992]: DEBUG nova.virt.hardware [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1125.561982] env[69992]: DEBUG nova.virt.hardware [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1125.562207] env[69992]: DEBUG nova.virt.hardware [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1125.562369] env[69992]: DEBUG nova.virt.hardware [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1125.562542] env[69992]: DEBUG nova.virt.hardware [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1125.562707] env[69992]: DEBUG nova.virt.hardware [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1125.562882] env[69992]: DEBUG nova.virt.hardware [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1125.564043] env[69992]: DEBUG oslo_concurrency.lockutils [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 47.557s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.564043] env[69992]: DEBUG nova.objects.instance [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69992) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1125.567480] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d2d49d-f346-41cd-8067-94b85f09ab38 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.576913] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88786561-d43a-40be-93bc-a8dd13a4d217 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.593990] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Instance VIF info [] {{(pid=69992) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1125.600875] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Creating folder: Project (9ec9fb7c918b4b199a21ec8ea6f45d07). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1125.602237] env[69992]: INFO nova.scheduler.client.report [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Deleted allocations for instance efa06ccc-be20-4d0e-938f-01c91ef4de8e [ 1125.603585] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-08acffb9-b6de-4a7f-8496-d2c01bc7fd18 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.620773] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Created folder: Project (9ec9fb7c918b4b199a21ec8ea6f45d07) in parent group-v581821. [ 1125.621018] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Creating folder: Instances. Parent ref: group-v582007. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1125.621317] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6b8138f1-e777-4ef4-ba4c-e049a6a6ec3c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.632632] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Created folder: Instances in parent group-v582007. [ 1125.632914] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1125.633144] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1125.633367] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-888af7e5-9df7-41f8-bef3-d27ffe3cd40c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.651677] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1125.651677] env[69992]: value = "task-2897331" [ 1125.651677] env[69992]: _type = "Task" [ 1125.651677] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.660011] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897331, 'name': CreateVM_Task} progress is 0%. 
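The Folder.CreateFolder calls above (a per-project folder under group-v581821, then an Instances folder beneath it) can be reproduced against a reachable vCenter with oslo.vmware's public session API. The host, credentials, project name, and constructor argument ordering below are assumptions to be checked against the installed oslo.vmware version:

```python
# Hedged sketch of the two CreateFolder calls recorded above, using
# oslo.vmware's public API. Host, credentials and the project folder name
# are placeholders; a real caller would also handle the DuplicateName fault
# raised when the folder already exists.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
                               10,    # api_retry_count
                               0.5)   # task_poll_interval, seconds

parent = vim_util.get_moref('group-v581821', 'Folder')   # tenant root folder from the log
project = session.invoke_api(session.vim, 'CreateFolder', parent,
                             name='Project (example-project-id)')
instances = session.invoke_api(session.vim, 'CreateFolder', project,
                               name='Instances')
```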
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.673577] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897326, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.683693] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897327, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.885180] env[69992]: DEBUG oslo_concurrency.lockutils [req-20865b3b-fd36-4385-84d9-520124c36bba req-789cbe8d-312f-485f-82c9-4d17f087ea88 service nova] Releasing lock "refresh_cache-f2ac32d7-d32b-497a-a262-ab1cd95f87d0" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1125.968386] env[69992]: DEBUG oslo_vmware.api [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897328, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.114620] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44e56bf1-7056-4880-a899-b29ffbc7dd87 tempest-InstanceActionsTestJSON-1780139484 tempest-InstanceActionsTestJSON-1780139484-project-member] Lock "efa06ccc-be20-4d0e-938f-01c91ef4de8e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 52.733s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.165158] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897331, 'name': CreateVM_Task, 'duration_secs': 0.303053} completed successfully. 
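The repeated "Task: {...} progress is N%" entries above come from oslo.vmware's task poller, which re-reads the task's info until it reports success or error. A simplified stand-in for that loop — plain time.sleep instead of the looping-call helper the log's loopingcall entries point to; get_task_info is a hypothetical callable wrapping the property read:

```python
# Simplified stand-in for the "_poll_task ... progress is N%" loop above.
# `get_task_info` is a hypothetical callable that would read the task's
# `info` property from vCenter and return a small dict.
import time

def wait_for_task(get_task_info, poll_interval=0.5):
    while True:
        info = get_task_info()            # e.g. {"state": "running", "progress": 94}
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "vCenter task failed"))
        time.sleep(poll_interval)         # still queued/running: poll again
```

Once the state flips to success, the completion entry with 'duration_secs' is logged, as in the CreateVM_Task and CopyVirtualDisk_Task lines above.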
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.168027] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1126.168533] env[69992]: DEBUG oslo_concurrency.lockutils [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.168776] env[69992]: DEBUG oslo_concurrency.lockutils [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1126.169251] env[69992]: DEBUG oslo_concurrency.lockutils [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1126.169842] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-445b6400-4a8a-475a-b826-7f2fe2569e6e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.174585] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897326, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.452217} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.175238] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] f64108ec-c3b2-4b11-9085-2c56b0de93f5/f64108ec-c3b2-4b11-9085-2c56b0de93f5.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1126.175837] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1126.176179] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-55859b9f-001a-4403-8b3d-912faf73a05e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.181495] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for the task: (returnval){ [ 1126.181495] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5201f369-bca7-9d7e-470e-f09388402f8a" [ 1126.181495] env[69992]: _type = "Task" [ 1126.181495] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.188748] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897327, 'name': CreateVM_Task, 'duration_secs': 0.534062} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.190070] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1126.190457] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for the task: (returnval){ [ 1126.190457] env[69992]: value = "task-2897332" [ 1126.190457] env[69992]: _type = "Task" [ 1126.190457] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.191141] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.197867] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5201f369-bca7-9d7e-470e-f09388402f8a, 'name': SearchDatastore_Task, 'duration_secs': 0.009635} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.198526] env[69992]: DEBUG oslo_concurrency.lockutils [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1126.198845] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1126.199151] env[69992]: DEBUG oslo_concurrency.lockutils [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.199363] env[69992]: DEBUG oslo_concurrency.lockutils [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1126.199620] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1126.202590] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1126.202976] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1126.203291] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f543e24-31fa-4269-bb4b-8d3664f2ff3b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.204971] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897332, 'name': ExtendVirtualDisk_Task} progress is 0%. 
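The "Acquiring lock / Acquired lock / Releasing lock" entries above for the devstack-image-cache_base path are emitted by oslo.concurrency's lock helper, which serializes work on a cached image. A minimal usage sketch — not Nova's exact call; the lock name is copied from the log:

```python
# Minimal oslo.concurrency usage matching the image-cache lock entries above.
# The "Acquired external semaphore" lines in the log show Nova also taking a
# cross-process semaphore for the same name on top of this in-process lock.
from oslo_concurrency import lockutils

CACHE_LOCK = ("[datastore1] devstack-image-cache_base/"
              "eb50549f-9db8-4c15-a738-0e4b1e9e33fb")

with lockutils.lock(CACHE_LOCK):
    # Only one greenthread in this process touches the cached image at a time;
    # the matching "Releasing lock" line is emitted when the block exits.
    pass
```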
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.205247] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed67871c-ab91-4db4-a8bc-1c4a73221d3f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.209334] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1126.209334] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5274e3f7-cdd2-35f7-8aa6-6823f1c6dd5d" [ 1126.209334] env[69992]: _type = "Task" [ 1126.209334] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.213433] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1126.213676] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1126.214652] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3928650-ec9f-48b9-a15b-82c741677b63 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.219228] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5274e3f7-cdd2-35f7-8aa6-6823f1c6dd5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.222008] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for the task: (returnval){ [ 1126.222008] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528e42ae-7d41-5d07-583d-13d3acbb66e7" [ 1126.222008] env[69992]: _type = "Task" [ 1126.222008] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.229264] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528e42ae-7d41-5d07-583d-13d3acbb66e7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.470932] env[69992]: DEBUG oslo_vmware.api [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897328, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.576490] env[69992]: DEBUG oslo_concurrency.lockutils [None req-95eb29de-1e34-4c2f-829c-7103f04f8ad3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.577672] env[69992]: DEBUG oslo_concurrency.lockutils [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 48.266s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1126.703841] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897332, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065165} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.708196] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1126.709048] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8baac6-d98e-4f76-8a69-6704f747514f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.748065] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] f64108ec-c3b2-4b11-9085-2c56b0de93f5/f64108ec-c3b2-4b11-9085-2c56b0de93f5.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1126.755012] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eafc978c-a4bd-4fae-9826-ab3e1f209f6d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.768999] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5274e3f7-cdd2-35f7-8aa6-6823f1c6dd5d, 'name': SearchDatastore_Task, 'duration_secs': 0.00981} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.769338] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1126.769571] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1126.769774] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.775861] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528e42ae-7d41-5d07-583d-13d3acbb66e7, 'name': SearchDatastore_Task, 'duration_secs': 0.008745} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.777626] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for the task: (returnval){ [ 1126.777626] env[69992]: value = "task-2897333" [ 1126.777626] env[69992]: _type = "Task" [ 1126.777626] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.777922] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-337d6100-5b5e-4bdf-a81b-28d04b81b3e6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.793933] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897333, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.798668] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for the task: (returnval){ [ 1126.798668] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5267cedd-bd4d-9e63-a143-6b4c9179d1c7" [ 1126.798668] env[69992]: _type = "Task" [ 1126.798668] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.813972] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5267cedd-bd4d-9e63-a143-6b4c9179d1c7, 'name': SearchDatastore_Task, 'duration_secs': 0.013019} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.816250] env[69992]: DEBUG oslo_concurrency.lockutils [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1126.816250] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3/4609d6ce-9d5b-408d-8cb6-1baf76d85bb3.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1126.816250] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1126.816250] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1126.816250] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-20521fda-5cae-4738-baac-e252e03e2f14 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.817990] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e9e7a638-b2f4-4e21-87ac-f4091fd62021 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.826731] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for the task: (returnval){ [ 1126.826731] env[69992]: value = "task-2897334" [ 1126.826731] env[69992]: _type = "Task" [ 1126.826731] env[69992]: } to complete. 
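The copy recorded above fans the cached image VMDK out to the new instance's directory via VirtualDiskManager.CopyVirtualDisk_Task. A hedged sketch of that call, assuming an authenticated oslo.vmware session as in the earlier folder sketch and a placeholder datacenter moref:

```python
# Hedged sketch of the CopyVirtualDisk_Task call above; `session` is an
# authenticated oslo.vmware VMwareAPISession (see the earlier folder sketch)
# and the datacenter moref value is a placeholder assumption.
from oslo_vmware import vim_util

disk_mgr = session.vim.service_content.virtualDiskManager
dc_ref = vim_util.get_moref('datacenter-3', 'Datacenter')

src = ('[datastore1] devstack-image-cache_base/'
       'eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk')
dst = ('[datastore1] 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3/'
       '4609d6ce-9d5b-408d-8cb6-1baf76d85bb3.vmdk')

task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                          sourceName=src, sourceDatacenter=dc_ref,
                          destName=dst, destDatacenter=dc_ref)
session.wait_for_task(task)   # blocks until the poller sees the task succeed
```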
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.828342] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1126.828574] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1126.832631] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e459727b-272c-4fb4-9001-9e75648c4bac {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.839916] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897334, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.841229] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1126.841229] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52da3e2b-9e59-1f4a-2edf-6397d272ce77" [ 1126.841229] env[69992]: _type = "Task" [ 1126.841229] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.848543] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52da3e2b-9e59-1f4a-2edf-6397d272ce77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.970404] env[69992]: DEBUG oslo_vmware.api [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897328, 'name': CreateSnapshot_Task, 'duration_secs': 1.436497} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.970684] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Created Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1126.971609] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-341e49f6-339e-436c-86b0-73853dd94a3d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.087502] env[69992]: INFO nova.compute.claims [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1127.294594] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897333, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.337594] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897334, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490803} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.337864] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3/4609d6ce-9d5b-408d-8cb6-1baf76d85bb3.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1127.338132] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1127.338442] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-45751c7e-c8aa-470e-9935-77596908557f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.346590] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for the task: (returnval){ [ 1127.346590] env[69992]: value = "task-2897335" [ 1127.346590] env[69992]: _type = "Task" [ 1127.346590] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.353594] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52da3e2b-9e59-1f4a-2edf-6397d272ce77, 'name': SearchDatastore_Task, 'duration_secs': 0.009909} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.354776] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a47f474e-0dbf-4255-9e92-6f1f6fd77249 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.360206] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897335, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.363209] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1127.363209] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d6d3ce-ba83-f183-0e9c-a681e0b97361" [ 1127.363209] env[69992]: _type = "Task" [ 1127.363209] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.371181] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d6d3ce-ba83-f183-0e9c-a681e0b97361, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.489373] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Creating linked-clone VM from snapshot {{(pid=69992) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1127.489750] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-450abd65-41b8-4d42-9e34-85e6d60d0a90 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.498437] env[69992]: DEBUG oslo_vmware.api [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1127.498437] env[69992]: value = "task-2897336" [ 1127.498437] env[69992]: _type = "Task" [ 1127.498437] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.506950] env[69992]: DEBUG oslo_vmware.api [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897336, 'name': CloneVM_Task} progress is 0%. 
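The snapshot sequence above (CreateSnapshot_Task, then a linked-clone CloneVM_Task) is how the VMware driver captures an image of a running instance. A compressed, hedged sketch of the two calls, again assuming the oslo.vmware session from the earlier sketches and placeholder moref/name values; Nova's real clone spec also sets datastore and host details omitted here:

```python
# Compressed sketch of CreateSnapshot_Task + linked-clone CloneVM_Task as in
# the entries above. `session` is the assumed oslo.vmware session from the
# earlier sketches; morefs and names are placeholder assumptions.
from oslo_vmware import vim_util

vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
folder_ref = vim_util.get_moref('group-v581821', 'Folder')

snap_task = session.invoke_api(session.vim, 'CreateSnapshot_Task', vm_ref,
                               name='image-snapshot', memory=False, quiesce=False)
snap_info = session.wait_for_task(snap_task)

factory = session.vim.client.factory
rel_spec = factory.create('ns0:VirtualMachineRelocateSpec')
rel_spec.diskMoveType = 'createNewChildDiskBacking'   # share parent disks: linked clone
clone_spec = factory.create('ns0:VirtualMachineCloneSpec')
clone_spec.location = rel_spec
clone_spec.snapshot = snap_info.result                # the snapshot just taken
clone_spec.powerOn = False
clone_spec.template = True

clone_task = session.invoke_api(session.vim, 'CloneVM_Task', vm_ref,
                                folder=folder_ref, name='image-clone',
                                spec=clone_spec)
session.wait_for_task(clone_task)
```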
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.597800] env[69992]: INFO nova.compute.resource_tracker [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Updating resource usage from migration a6bc0f28-8d6f-4922-895d-929bc5809dd2 [ 1127.796699] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897333, 'name': ReconfigVM_Task, 'duration_secs': 0.541104} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.797194] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Reconfigured VM instance instance-0000003f to attach disk [datastore1] f64108ec-c3b2-4b11-9085-2c56b0de93f5/f64108ec-c3b2-4b11-9085-2c56b0de93f5.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1127.797950] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-07af5e45-fc2e-44bc-b9b8-100df55d9522 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.807197] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for the task: (returnval){ [ 1127.807197] env[69992]: value = "task-2897337" [ 1127.807197] env[69992]: _type = "Task" [ 1127.807197] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.816941] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897337, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.856138] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897335, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068886} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.859232] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1127.859453] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3af5398-b359-44d4-95b6-88cfea3e5785 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.883611] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3/4609d6ce-9d5b-408d-8cb6-1baf76d85bb3.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1127.889779] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5bce29d7-d122-422b-b380-11b17ce716fb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.911043] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d6d3ce-ba83-f183-0e9c-a681e0b97361, 'name': SearchDatastore_Task, 'duration_secs': 0.00984} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.911979] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1127.912391] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] f2ac32d7-d32b-497a-a262-ab1cd95f87d0/f2ac32d7-d32b-497a-a262-ab1cd95f87d0.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1127.912755] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b1b2463-aff8-4eda-99ab-1f062ab11ded {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.916633] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for the task: (returnval){ [ 1127.916633] env[69992]: value = "task-2897338" [ 1127.916633] env[69992]: _type = "Task" [ 1127.916633] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.920856] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1127.920856] env[69992]: value = "task-2897339" [ 1127.920856] env[69992]: _type = "Task" [ 1127.920856] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.930266] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897338, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.937798] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897339, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.008397] env[69992]: DEBUG oslo_vmware.api [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897336, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.255055] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14bf4eac-c319-40e6-9d7a-6a6baea6ad8e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.259639] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0c2776-5254-4db6-8f61-2e43fef019c3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.294335] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8faf68f-fe9c-487f-8e01-982662491a19 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.312583] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1e542b-b7e8-43e2-b4b1-e4d1d67e85c0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.321645] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897337, 'name': Rename_Task, 'duration_secs': 0.148615} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.331504] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1128.331959] env[69992]: DEBUG nova.compute.provider_tree [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1128.333414] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a201558b-378f-4e26-bfb6-67670fd4f16d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.342615] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for the task: (returnval){ [ 1128.342615] env[69992]: value = "task-2897340" [ 1128.342615] env[69992]: _type = "Task" [ 1128.342615] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.351591] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897340, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.428166] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897338, 'name': ReconfigVM_Task, 'duration_secs': 0.329542} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.428815] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3/4609d6ce-9d5b-408d-8cb6-1baf76d85bb3.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1128.429474] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0a5c5e6f-7c95-41da-b7a2-65ef97d6589f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.433887] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897339, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.440666] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for the task: (returnval){ [ 1128.440666] env[69992]: value = "task-2897341" [ 1128.440666] env[69992]: _type = "Task" [ 1128.440666] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.448968] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897341, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.509382] env[69992]: DEBUG oslo_vmware.api [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897336, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.836797] env[69992]: DEBUG nova.scheduler.client.report [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1128.854141] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897340, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.932039] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897339, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.951391] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897341, 'name': Rename_Task, 'duration_secs': 0.150915} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.951742] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1128.951913] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c277b1fd-d7c5-4180-a81d-85187b8bf179 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.961018] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for the task: (returnval){ [ 1128.961018] env[69992]: value = "task-2897342" [ 1128.961018] env[69992]: _type = "Task" [ 1128.961018] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.968444] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897342, 'name': PowerOnVM_Task} progress is 0%. 
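After the disk is attached and the VM renamed, the driver powers the machine on, and the spawn finishes once PowerOnVM_Task completes, as the entries above and below show. A short hedged sketch of that final step, with the assumed session from the earlier sketches and a placeholder VM moref:

```python
# Hedged sketch of the final PowerOnVM_Task step; `session` is the assumed
# oslo.vmware session from the earlier sketches, the moref is a placeholder.
from oslo_vmware import vim_util

vm_ref = vim_util.get_moref('vm-12346', 'VirtualMachine')
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)   # mirrors "PowerOnVM_Task ... completed successfully"
```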
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.011572] env[69992]: DEBUG oslo_vmware.api [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897336, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.342698] env[69992]: DEBUG oslo_concurrency.lockutils [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.765s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1129.343097] env[69992]: INFO nova.compute.manager [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Migrating [ 1129.354227] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.846s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1129.354227] env[69992]: DEBUG nova.objects.instance [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lazy-loading 'pci_requests' on Instance uuid dd31269e-716c-44cd-9fc3-ce227fe5b3b2 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1129.375670] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897340, 'name': PowerOnVM_Task} progress is 71%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.434501] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897339, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.44168} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.434773] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] f2ac32d7-d32b-497a-a262-ab1cd95f87d0/f2ac32d7-d32b-497a-a262-ab1cd95f87d0.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1129.435011] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1129.435332] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-006b979a-3abc-4739-80cb-66bb194a05ca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.443100] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1129.443100] env[69992]: value = "task-2897343" [ 1129.443100] env[69992]: _type = "Task" [ 1129.443100] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.454852] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897343, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.472045] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897342, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.510213] env[69992]: DEBUG oslo_vmware.api [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897336, 'name': CloneVM_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.865844] env[69992]: DEBUG oslo_vmware.api [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897340, 'name': PowerOnVM_Task, 'duration_secs': 1.306829} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.866322] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1129.866598] env[69992]: INFO nova.compute.manager [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Took 9.56 seconds to spawn the instance on the hypervisor. [ 1129.866785] env[69992]: DEBUG nova.compute.manager [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1129.867577] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa12d151-5396-4215-84df-c2208f4b392c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.873069] env[69992]: DEBUG nova.objects.instance [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lazy-loading 'numa_topology' on Instance uuid dd31269e-716c-44cd-9fc3-ce227fe5b3b2 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1129.874458] env[69992]: DEBUG oslo_concurrency.lockutils [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "refresh_cache-a7f01cd7-f148-48fc-a71a-5461672d6039" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.874646] env[69992]: DEBUG oslo_concurrency.lockutils [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired lock "refresh_cache-a7f01cd7-f148-48fc-a71a-5461672d6039" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1129.874819] env[69992]: DEBUG nova.network.neutron [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1129.952781] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897343, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076059} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.953180] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1129.953936] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a29e8be5-c70d-4c3e-a1c7-23678c724182 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.977587] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] f2ac32d7-d32b-497a-a262-ab1cd95f87d0/f2ac32d7-d32b-497a-a262-ab1cd95f87d0.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1129.980720] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b413bbe-0a11-42b2-bf64-afd3c081aa90 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.007216] env[69992]: DEBUG oslo_vmware.api [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897342, 'name': PowerOnVM_Task, 'duration_secs': 0.713736} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.007606] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1130.007606] env[69992]: value = "task-2897344" [ 1130.007606] env[69992]: _type = "Task" [ 1130.007606] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.008227] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1130.008470] env[69992]: INFO nova.compute.manager [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Took 4.48 seconds to spawn the instance on the hypervisor. 
[ 1130.008654] env[69992]: DEBUG nova.compute.manager [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1130.009485] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dbdf513-46c9-4714-8821-890875ab707a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.017729] env[69992]: DEBUG oslo_vmware.api [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897336, 'name': CloneVM_Task, 'duration_secs': 2.124307} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.018699] env[69992]: INFO nova.virt.vmwareapi.vmops [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Created linked-clone VM from snapshot [ 1130.019399] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc7bc1fc-2525-48f8-89e6-7bfb0c48f430 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.026098] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897344, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.034708] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Uploading image 10b701c1-9a32-4c7e-a195-4676726c8b8e {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1130.065990] env[69992]: DEBUG oslo_vmware.rw_handles [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1130.065990] env[69992]: value = "vm-582011" [ 1130.065990] env[69992]: _type = "VirtualMachine" [ 1130.065990] env[69992]: }. 
{{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1130.066285] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-583978e7-2873-42aa-b812-b401808f84d6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.074474] env[69992]: DEBUG oslo_vmware.rw_handles [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lease: (returnval){ [ 1130.074474] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]522ba4db-613c-2d34-9a3b-99540eb6d267" [ 1130.074474] env[69992]: _type = "HttpNfcLease" [ 1130.074474] env[69992]: } obtained for exporting VM: (result){ [ 1130.074474] env[69992]: value = "vm-582011" [ 1130.074474] env[69992]: _type = "VirtualMachine" [ 1130.074474] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1130.074844] env[69992]: DEBUG oslo_vmware.api [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the lease: (returnval){ [ 1130.074844] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]522ba4db-613c-2d34-9a3b-99540eb6d267" [ 1130.074844] env[69992]: _type = "HttpNfcLease" [ 1130.074844] env[69992]: } to be ready. {{(pid=69992) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1130.082792] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1130.082792] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]522ba4db-613c-2d34-9a3b-99540eb6d267" [ 1130.082792] env[69992]: _type = "HttpNfcLease" [ 1130.082792] env[69992]: } is initializing. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1130.377290] env[69992]: INFO nova.compute.claims [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1130.394827] env[69992]: INFO nova.compute.manager [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Took 62.14 seconds to build instance. [ 1131.288780] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45547c1c-e3d4-4b18-b6e5-de83efe881e0 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lock "f64108ec-c3b2-4b11-9085-2c56b0de93f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.880s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1131.289045] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897344, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.289229] env[69992]: WARNING oslo_vmware.common.loopingcall [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] task run outlasted interval by 0.27917899999999995 sec [ 1131.298724] env[69992]: DEBUG nova.compute.manager [req-3b3ae8b7-d7b7-4af7-aabb-892b51fb2247 req-59e8a8ad-781e-41ef-ad4b-1cb440612368 service nova] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Received event network-changed-4e3de664-c228-44f8-84f3-2c40c903246e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1131.298724] env[69992]: DEBUG nova.compute.manager [req-3b3ae8b7-d7b7-4af7-aabb-892b51fb2247 req-59e8a8ad-781e-41ef-ad4b-1cb440612368 service nova] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Refreshing instance network info cache due to event network-changed-4e3de664-c228-44f8-84f3-2c40c903246e. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1131.298898] env[69992]: DEBUG oslo_concurrency.lockutils [req-3b3ae8b7-d7b7-4af7-aabb-892b51fb2247 req-59e8a8ad-781e-41ef-ad4b-1cb440612368 service nova] Acquiring lock "refresh_cache-f64108ec-c3b2-4b11-9085-2c56b0de93f5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.299052] env[69992]: DEBUG oslo_concurrency.lockutils [req-3b3ae8b7-d7b7-4af7-aabb-892b51fb2247 req-59e8a8ad-781e-41ef-ad4b-1cb440612368 service nova] Acquired lock "refresh_cache-f64108ec-c3b2-4b11-9085-2c56b0de93f5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1131.299237] env[69992]: DEBUG nova.network.neutron [req-3b3ae8b7-d7b7-4af7-aabb-892b51fb2247 req-59e8a8ad-781e-41ef-ad4b-1cb440612368 service nova] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Refreshing network info cache for port 4e3de664-c228-44f8-84f3-2c40c903246e {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1131.302140] env[69992]: INFO nova.compute.manager [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Took 56.09 seconds to build instance. [ 1131.315426] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897344, 'name': ReconfigVM_Task, 'duration_secs': 0.885693} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.315635] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1131.315635] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]522ba4db-613c-2d34-9a3b-99540eb6d267" [ 1131.315635] env[69992]: _type = "HttpNfcLease" [ 1131.315635] env[69992]: } is ready. 
{{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1131.315873] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Reconfigured VM instance instance-00000040 to attach disk [datastore1] f2ac32d7-d32b-497a-a262-ab1cd95f87d0/f2ac32d7-d32b-497a-a262-ab1cd95f87d0.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1131.316630] env[69992]: DEBUG oslo_vmware.rw_handles [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1131.316630] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]522ba4db-613c-2d34-9a3b-99540eb6d267" [ 1131.316630] env[69992]: _type = "HttpNfcLease" [ 1131.316630] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1131.317058] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2212d304-471e-4eed-95ac-9831de433f36 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.319274] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d80e648f-838c-43eb-9488-9ad803b4fa39 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.328534] env[69992]: DEBUG oslo_vmware.rw_handles [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5289ca6b-71d7-b034-2bc0-5b0fdfd0d7f8/disk-0.vmdk from lease info. {{(pid=69992) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1131.328894] env[69992]: DEBUG oslo_vmware.rw_handles [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5289ca6b-71d7-b034-2bc0-5b0fdfd0d7f8/disk-0.vmdk for reading. {{(pid=69992) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1131.330984] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1131.330984] env[69992]: value = "task-2897346" [ 1131.330984] env[69992]: _type = "Task" [ 1131.330984] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.397572] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897346, 'name': Rename_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.436835] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e7aca26a-5592-4cc6-ab8a-3857522a520d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.526820] env[69992]: DEBUG nova.network.neutron [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Updating instance_info_cache with network_info: [{"id": "b39fa912-b02a-4764-8cc8-f79e08d575c6", "address": "fa:16:3e:02:93:e2", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb39fa912-b0", "ovs_interfaceid": "b39fa912-b02a-4764-8cc8-f79e08d575c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.808358] env[69992]: DEBUG oslo_concurrency.lockutils [None req-615734dd-881b-4878-aae1-24476c44023f tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquiring lock "f64108ec-c3b2-4b11-9085-2c56b0de93f5" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1131.808581] env[69992]: DEBUG oslo_concurrency.lockutils [None req-615734dd-881b-4878-aae1-24476c44023f tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lock "f64108ec-c3b2-4b11-9085-2c56b0de93f5" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1131.809213] env[69992]: INFO nova.compute.manager [None req-615734dd-881b-4878-aae1-24476c44023f tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Rebooting instance [ 1131.812607] env[69992]: DEBUG oslo_concurrency.lockutils [None req-304f846e-09c7-4870-b5fc-b5e16346eccc tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Lock "4609d6ce-9d5b-408d-8cb6-1baf76d85bb3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" 
:: held 61.074s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1131.845859] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897346, 'name': Rename_Task, 'duration_secs': 0.168986} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.846867] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1131.849173] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eae72f01-fd97-4d80-8653-55e3d040ad8c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.866766] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1131.866766] env[69992]: value = "task-2897347" [ 1131.866766] env[69992]: _type = "Task" [ 1131.866766] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.886673] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897347, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.031828] env[69992]: DEBUG oslo_concurrency.lockutils [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Releasing lock "refresh_cache-a7f01cd7-f148-48fc-a71a-5461672d6039" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1132.224337] env[69992]: DEBUG nova.network.neutron [req-3b3ae8b7-d7b7-4af7-aabb-892b51fb2247 req-59e8a8ad-781e-41ef-ad4b-1cb440612368 service nova] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Updated VIF entry in instance network info cache for port 4e3de664-c228-44f8-84f3-2c40c903246e. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1132.224337] env[69992]: DEBUG nova.network.neutron [req-3b3ae8b7-d7b7-4af7-aabb-892b51fb2247 req-59e8a8ad-781e-41ef-ad4b-1cb440612368 service nova] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Updating instance_info_cache with network_info: [{"id": "4e3de664-c228-44f8-84f3-2c40c903246e", "address": "fa:16:3e:b0:84:44", "network": {"id": "daf09f8e-2217-4777-9a09-57c09080946c", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-520713710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bb19b95496548c084be8a8c87b8cd94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e3de664-c2", "ovs_interfaceid": "4e3de664-c228-44f8-84f3-2c40c903246e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.355965] env[69992]: INFO nova.compute.manager [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Rebuilding instance [ 1132.360763] env[69992]: DEBUG oslo_concurrency.lockutils [None req-615734dd-881b-4878-aae1-24476c44023f tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquiring lock "refresh_cache-f64108ec-c3b2-4b11-9085-2c56b0de93f5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.394034] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897347, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.429835] env[69992]: DEBUG nova.compute.manager [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1132.430656] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43c9d15-298d-4da8-98d7-e53465d78757 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.592701] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-042df687-10f9-4b63-8d1f-36b2dd0322e5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.603054] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7da1b45-2633-46bf-96ce-1ab263587560 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.642303] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dac9b6e0-c05a-4e0c-b4a8-657b7df70075 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.651029] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42009991-857b-42d4-9eb6-1255e6edbf8d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.667023] env[69992]: DEBUG nova.compute.provider_tree [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1132.728918] env[69992]: DEBUG oslo_concurrency.lockutils [req-3b3ae8b7-d7b7-4af7-aabb-892b51fb2247 req-59e8a8ad-781e-41ef-ad4b-1cb440612368 service nova] Releasing lock "refresh_cache-f64108ec-c3b2-4b11-9085-2c56b0de93f5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1132.729422] env[69992]: DEBUG oslo_concurrency.lockutils [None req-615734dd-881b-4878-aae1-24476c44023f tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquired lock "refresh_cache-f64108ec-c3b2-4b11-9085-2c56b0de93f5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.729617] env[69992]: DEBUG nova.network.neutron [None req-615734dd-881b-4878-aae1-24476c44023f tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1132.878751] env[69992]: DEBUG oslo_vmware.api [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897347, 'name': PowerOnVM_Task, 'duration_secs': 0.598424} 
completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.879207] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1132.879516] env[69992]: INFO nova.compute.manager [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Took 9.95 seconds to spawn the instance on the hypervisor. [ 1132.879694] env[69992]: DEBUG nova.compute.manager [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1132.880477] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d4e974d-ef99-44e9-895a-93bf9dbf295b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.171113] env[69992]: DEBUG nova.scheduler.client.report [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1133.405149] env[69992]: INFO nova.compute.manager [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Took 61.05 seconds to build instance. [ 1133.449018] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1133.449382] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-83b3846a-f12d-4372-8c31-6cb4e2a6df0f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.457498] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for the task: (returnval){ [ 1133.457498] env[69992]: value = "task-2897348" [ 1133.457498] env[69992]: _type = "Task" [ 1133.457498] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.461710] env[69992]: DEBUG nova.network.neutron [None req-615734dd-881b-4878-aae1-24476c44023f tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Updating instance_info_cache with network_info: [{"id": "4e3de664-c228-44f8-84f3-2c40c903246e", "address": "fa:16:3e:b0:84:44", "network": {"id": "daf09f8e-2217-4777-9a09-57c09080946c", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-520713710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bb19b95496548c084be8a8c87b8cd94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e3de664-c2", "ovs_interfaceid": "4e3de664-c228-44f8-84f3-2c40c903246e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.470367] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897348, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.552640] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f69173f0-1837-4d40-93c6-e1166392c96e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.572856] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Updating instance 'a7f01cd7-f148-48fc-a71a-5461672d6039' progress to 0 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1133.682172] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.328s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.684573] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 45.422s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1133.684767] env[69992]: DEBUG nova.objects.instance [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69992) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1133.718414] env[69992]: INFO nova.network.neutron [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Updating port d325d681-8643-43a2-93dd-d4687ad115f5 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1133.743560] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1133.743890] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1133.909519] env[69992]: DEBUG oslo_concurrency.lockutils [None req-44137f84-db80-46b8-b64b-be62c816a571 tempest-DeleteServersTestJSON-1168726256 
tempest-DeleteServersTestJSON-1168726256-project-member] Lock "f2ac32d7-d32b-497a-a262-ab1cd95f87d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.423s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.966088] env[69992]: DEBUG oslo_concurrency.lockutils [None req-615734dd-881b-4878-aae1-24476c44023f tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Releasing lock "refresh_cache-f64108ec-c3b2-4b11-9085-2c56b0de93f5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.975730] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897348, 'name': PowerOffVM_Task, 'duration_secs': 0.219116} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.976742] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1133.977666] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1133.978885] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f05bdac-cf68-4229-9254-dca67b087bfa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.987484] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1133.987895] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ab61f65a-21a4-43f0-ba50-952f0ccc8da5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.020239] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1134.020511] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1134.020699] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Deleting the datastore file [datastore1] 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1134.020962] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c369775-523b-484a-b1be-7274c6881932 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.028325] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for the task: (returnval){ [ 1134.028325] env[69992]: value = "task-2897350" [ 1134.028325] env[69992]: _type = "Task" [ 1134.028325] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.036653] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897350, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.079209] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1134.079501] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-232d0700-39f0-4278-a8c5-fce957131e8e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.086139] env[69992]: DEBUG oslo_vmware.api [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1134.086139] env[69992]: value = "task-2897351" [ 1134.086139] env[69992]: _type = "Task" [ 1134.086139] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.096654] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] VM already powered off {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1134.096902] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Updating instance 'a7f01cd7-f148-48fc-a71a-5461672d6039' progress to 17 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1134.246579] env[69992]: DEBUG nova.compute.manager [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1134.474817] env[69992]: DEBUG nova.compute.manager [None req-615734dd-881b-4878-aae1-24476c44023f tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1134.475817] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99845441-51d4-41b6-affc-2d6e9fdd106b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.539579] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897350, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148038} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.539724] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1134.539781] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1134.539983] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1134.603334] env[69992]: DEBUG nova.virt.hardware [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1134.603584] env[69992]: DEBUG nova.virt.hardware [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1134.603744] env[69992]: DEBUG nova.virt.hardware [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1134.603929] env[69992]: DEBUG nova.virt.hardware [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1134.604096] env[69992]: DEBUG nova.virt.hardware [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1134.604251] env[69992]: DEBUG nova.virt.hardware [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1134.604461] env[69992]: DEBUG nova.virt.hardware [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1134.604617] env[69992]: DEBUG nova.virt.hardware [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1134.604778] env[69992]: DEBUG nova.virt.hardware [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1134.605029] env[69992]: DEBUG nova.virt.hardware [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1134.605116] env[69992]: DEBUG nova.virt.hardware [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1134.610611] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-281c8073-2a98-4f74-82a3-534e967ba12b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.627631] env[69992]: DEBUG oslo_vmware.api [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1134.627631] env[69992]: value = "task-2897352" [ 1134.627631] env[69992]: _type = "Task" [ 1134.627631] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.636385] env[69992]: DEBUG oslo_vmware.api [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897352, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.696226] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7338a835-79f5-4e17-a5fd-a9dc1fb73ac3 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.697775] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 45.333s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.771629] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.000709] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d711b263-e14c-42e1-9127-85d3aec0931f tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "f2ac32d7-d32b-497a-a262-ab1cd95f87d0" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.000950] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d711b263-e14c-42e1-9127-85d3aec0931f tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "f2ac32d7-d32b-497a-a262-ab1cd95f87d0" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.138827] env[69992]: DEBUG oslo_vmware.api [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897352, 'name': ReconfigVM_Task, 'duration_secs': 0.312611} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.139186] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Updating instance 'a7f01cd7-f148-48fc-a71a-5461672d6039' progress to 33 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1135.326351] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "refresh_cache-dd31269e-716c-44cd-9fc3-ce227fe5b3b2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.326484] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquired lock "refresh_cache-dd31269e-716c-44cd-9fc3-ce227fe5b3b2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1135.327752] env[69992]: DEBUG nova.network.neutron [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1135.355278] env[69992]: DEBUG nova.compute.manager [req-2576d6f5-2702-485a-9c67-c1c0ed8c108f req-8c62789e-1184-49ec-a55b-a8865097ef41 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Received event network-vif-plugged-d325d681-8643-43a2-93dd-d4687ad115f5 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1135.355508] env[69992]: DEBUG oslo_concurrency.lockutils [req-2576d6f5-2702-485a-9c67-c1c0ed8c108f req-8c62789e-1184-49ec-a55b-a8865097ef41 service nova] Acquiring lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.355716] env[69992]: DEBUG oslo_concurrency.lockutils [req-2576d6f5-2702-485a-9c67-c1c0ed8c108f req-8c62789e-1184-49ec-a55b-a8865097ef41 service nova] Lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.355885] env[69992]: DEBUG oslo_concurrency.lockutils [req-2576d6f5-2702-485a-9c67-c1c0ed8c108f req-8c62789e-1184-49ec-a55b-a8865097ef41 service nova] Lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1135.356910] env[69992]: DEBUG nova.compute.manager [req-2576d6f5-2702-485a-9c67-c1c0ed8c108f req-8c62789e-1184-49ec-a55b-a8865097ef41 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] No waiting events found dispatching network-vif-plugged-d325d681-8643-43a2-93dd-d4687ad115f5 {{(pid=69992) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1135.357267] env[69992]: WARNING nova.compute.manager [req-2576d6f5-2702-485a-9c67-c1c0ed8c108f req-8c62789e-1184-49ec-a55b-a8865097ef41 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Received unexpected event network-vif-plugged-d325d681-8643-43a2-93dd-d4687ad115f5 for instance with vm_state shelved_offloaded and task_state spawning. [ 1135.493039] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a147e51-8ddc-45f9-bd70-777f4ddee0e8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.503651] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-615734dd-881b-4878-aae1-24476c44023f tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Doing hard reboot of VM {{(pid=69992) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1135.503995] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-5c92b314-10cd-4972-a468-121f09118b1e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.507466] env[69992]: DEBUG nova.compute.utils [None req-d711b263-e14c-42e1-9127-85d3aec0931f tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1135.516663] env[69992]: DEBUG oslo_vmware.api [None req-615734dd-881b-4878-aae1-24476c44023f tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for the task: (returnval){ [ 1135.516663] env[69992]: value = "task-2897353" [ 1135.516663] env[69992]: _type = "Task" [ 1135.516663] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.528587] env[69992]: DEBUG oslo_vmware.api [None req-615734dd-881b-4878-aae1-24476c44023f tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897353, 'name': ResetVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.579555] env[69992]: DEBUG nova.virt.hardware [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1135.579765] env[69992]: DEBUG nova.virt.hardware [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1135.579929] env[69992]: DEBUG nova.virt.hardware [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1135.580164] env[69992]: DEBUG nova.virt.hardware [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1135.580330] env[69992]: DEBUG nova.virt.hardware [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1135.580461] env[69992]: DEBUG nova.virt.hardware [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1135.580693] env[69992]: DEBUG nova.virt.hardware [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1135.580850] env[69992]: DEBUG nova.virt.hardware [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1135.581644] 
env[69992]: DEBUG nova.virt.hardware [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1135.581644] env[69992]: DEBUG nova.virt.hardware [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1135.581644] env[69992]: DEBUG nova.virt.hardware [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1135.582254] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82170080-a07b-4440-8bcb-7c0a50b34467 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.590497] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9172ea-ef94-4794-9e1b-9a2927ab0f1a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.604365] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Instance VIF info [] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1135.610034] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1135.610400] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1135.610637] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6bf502ae-1c5d-477e-a8ae-753db46e35e7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.628234] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1135.628234] env[69992]: value = "task-2897354" [ 1135.628234] env[69992]: _type = "Task" [ 1135.628234] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.636603] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897354, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.646041] env[69992]: DEBUG nova.virt.hardware [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1135.646378] env[69992]: DEBUG nova.virt.hardware [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1135.646550] env[69992]: DEBUG nova.virt.hardware [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1135.646733] env[69992]: DEBUG nova.virt.hardware [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1135.646881] env[69992]: DEBUG nova.virt.hardware [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1135.647060] env[69992]: DEBUG nova.virt.hardware [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1135.647330] env[69992]: DEBUG nova.virt.hardware [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1135.648028] env[69992]: DEBUG nova.virt.hardware [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1135.648028] env[69992]: DEBUG nova.virt.hardware [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] 
Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1135.648028] env[69992]: DEBUG nova.virt.hardware [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1135.648028] env[69992]: DEBUG nova.virt.hardware [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1135.654246] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Reconfiguring VM instance instance-00000024 to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1135.654246] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95f2c9dc-050b-4233-a16a-0d8f09629aa4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.673028] env[69992]: DEBUG oslo_vmware.api [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1135.673028] env[69992]: value = "task-2897355" [ 1135.673028] env[69992]: _type = "Task" [ 1135.673028] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.684941] env[69992]: DEBUG oslo_vmware.api [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897355, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.721981] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Applying migration context for instance 0e8163d9-6ff5-4f1e-af33-ccb42fa46750 as it has an incoming, in-progress migration 53d4b0cb-ec86-417c-87f6-76638a7b3c0b. Migration status is reverting {{(pid=69992) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1135.722232] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Applying migration context for instance a7f01cd7-f148-48fc-a71a-5461672d6039 as it has an incoming, in-progress migration a6bc0f28-8d6f-4922-895d-929bc5809dd2. 
Migration status is migrating {{(pid=69992) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1135.725252] env[69992]: INFO nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Updating resource usage from migration 53d4b0cb-ec86-417c-87f6-76638a7b3c0b [ 1135.725587] env[69992]: INFO nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Updating resource usage from migration a6bc0f28-8d6f-4922-895d-929bc5809dd2 [ 1135.725875] env[69992]: INFO nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Updating resource usage from migration a5252a08-401e-4a46-9c0d-2521390462d4 [ 1135.752536] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1135.752700] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 27492ef7-8258-4001-b3b3-5bcb94e12c1f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1135.752903] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance a49b4721-e338-4e60-b91e-137caa3c9c03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1135.753062] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 1d436762-964d-40d9-871e-ee33c3ba25b5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1135.753186] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance eec50935-f553-43c7-b67b-7289299745bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1135.753305] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance a8813822-f77b-4b73-a6dc-e0eab83b0402 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1135.753419] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance bcb5131c-b2c6-4971-8a2e-4fcd7133442d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1135.753535] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1135.753647] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1135.753758] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance b7a1b9e1-4d57-435f-bdb6-51481968aacb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1135.753872] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance af07ebd0-5f12-49c3-a518-95be9a8d6c82 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1135.754015] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 7fc7c481-75e8-40f2-a971-752ce6dde59b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1135.754134] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance a06d4b38-0e39-46ef-a588-7627661cb201 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1135.754319] env[69992]: WARNING nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 673be00f-e3c5-4a54-beeb-cf89828e9e32 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
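
The resource_tracker lines above and below come from Nova's periodic _update_available_resource pass, which compares the allocations that placement holds against this compute node with the instances and migrations the host actually tracks. The sketch that follows is a minimal, simplified illustration of that per-consumer decision (keep the allocation, skip healing, or warn about an orphan); it is not the real nova.compute.resource_tracker code, and every name in it (Allocation, reconcile_allocations, the tracked_* sets) is hypothetical.

    # Simplified illustration of the allocation-reconciliation logic reflected in
    # the resource_tracker DEBUG/WARNING entries in this log. This is NOT the
    # actual Nova implementation; all names here are invented for illustration.

    from dataclasses import dataclass


    @dataclass
    class Allocation:
        consumer_uuid: str   # instance or migration UUID that owns the allocation
        resources: dict      # e.g. {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}


    def reconcile_allocations(allocations, tracked_instances, tracked_migrations,
                              scheduled_but_not_started):
        """Walk placement allocations against this compute node and decide,
        per consumer, whether to keep them, skip healing, or warn."""
        for alloc in allocations:
            uuid = alloc.consumer_uuid
            if uuid in tracked_migrations:
                # "Migration ... is active on this compute host and has allocations"
                print(f"DEBUG: migration {uuid} active here, keeping {alloc.resources}")
            elif uuid in tracked_instances:
                # "Instance ... actively managed on this compute host ..."
                print(f"DEBUG: instance {uuid} actively managed, keeping {alloc.resources}")
            elif uuid in scheduled_but_not_started:
                # "... the scheduler has made an allocation against this compute
                #  node but the instance has yet to start. Skipping heal ..."
                print(f"DEBUG: instance {uuid} scheduled but not started, skipping heal")
            else:
                # "... not being actively managed by this compute host but has
                #  allocations referencing this compute host ... Skipping heal of
                #  allocation because we do not know what to do."
                print(f"WARNING: orphan allocation for {uuid}: {alloc.resources}")


    # Example: the orphan case seen in the log above.
    allocs = [Allocation('673be00f-e3c5-4a54-beeb-cf89828e9e32',
                         {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1})]
    reconcile_allocations(allocs, tracked_instances=set(), tracked_migrations=set(),
                          scheduled_but_not_started=set())

Under that reading, consumers such as 673be00f-e3c5-4a54-beeb-cf89828e9e32 fall into the final branch, which is why they surface as WARNINGs and are left untouched rather than having their allocations healed.
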
[ 1135.754468] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance fcbe1142-72dc-4a02-af9b-e03a2031a247 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1135.754626] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 1b4da2ab-d026-45d8-8234-79ddd84d5cbb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1135.754782] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1135.754943] env[69992]: WARNING nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 7932a42f-6a62-4c2c-be9a-3cb518fe4183 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1135.755110] env[69992]: WARNING nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 131096fc-addf-4d9a-9cd7-4abe98aabd1f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1135.755252] env[69992]: WARNING nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 086ac14d-74bb-4bb6-90b3-3e345b2894a9 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1135.755408] env[69992]: WARNING nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 32bdb15d-6a4d-4445-9b82-d18b0f6743b6 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1135.755547] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance dd31269e-716c-44cd-9fc3-ce227fe5b3b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1135.755727] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 546fb923-4574-4407-8625-69e6c4d8d35e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1135.755910] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Migration 53d4b0cb-ec86-417c-87f6-76638a7b3c0b is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1135.756050] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 0e8163d9-6ff5-4f1e-af33-ccb42fa46750 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1135.756200] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance f64108ec-c3b2-4b11-9085-2c56b0de93f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1135.756324] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance f2ac32d7-d32b-497a-a262-ab1cd95f87d0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1135.756448] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1136.012119] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d711b263-e14c-42e1-9127-85d3aec0931f tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "f2ac32d7-d32b-497a-a262-ab1cd95f87d0" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1136.033398] env[69992]: DEBUG oslo_vmware.api [None req-615734dd-881b-4878-aae1-24476c44023f tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897353, 'name': ResetVM_Task, 'duration_secs': 0.117193} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.033398] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-615734dd-881b-4878-aae1-24476c44023f tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Did hard reboot of VM {{(pid=69992) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1136.033398] env[69992]: DEBUG nova.compute.manager [None req-615734dd-881b-4878-aae1-24476c44023f tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1136.033398] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57455b9c-a6b2-4e77-b44c-752bb2486f09 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.104882] env[69992]: DEBUG nova.network.neutron [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Updating instance_info_cache with network_info: [{"id": "d325d681-8643-43a2-93dd-d4687ad115f5", "address": "fa:16:3e:19:cd:26", "network": {"id": "50514bed-dff8-45a5-83f3-ec0c1ece9611", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1240365716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f3a2959667e41f1b5868994454b21be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd325d681-86", "ovs_interfaceid": "d325d681-8643-43a2-93dd-d4687ad115f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1136.138647] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897354, 'name': CreateVM_Task, 'duration_secs': 0.505812} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.138796] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1136.139212] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.139415] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1136.139735] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1136.140013] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4340bd5a-538f-4a38-a157-21db77c04d1a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.144658] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for the task: (returnval){ [ 1136.144658] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52c575c4-a066-477c-13df-34ec0288a7f9" [ 1136.144658] env[69992]: _type = "Task" [ 1136.144658] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.152977] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c575c4-a066-477c-13df-34ec0288a7f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.182101] env[69992]: DEBUG oslo_vmware.api [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897355, 'name': ReconfigVM_Task, 'duration_secs': 0.249052} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.182423] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Reconfigured VM instance instance-00000024 to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1136.183228] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b588d88a-60aa-41a4-a547-7031d2d692e4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.205675] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Reconfiguring VM instance instance-00000024 to attach disk [datastore2] a7f01cd7-f148-48fc-a71a-5461672d6039/a7f01cd7-f148-48fc-a71a-5461672d6039.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1136.205844] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c238daa5-36db-4fa0-9712-a8f5937c600f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.223299] env[69992]: DEBUG oslo_vmware.api [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1136.223299] env[69992]: value = "task-2897356" [ 1136.223299] env[69992]: _type = "Task" [ 1136.223299] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.231028] env[69992]: DEBUG oslo_vmware.api [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897356, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.260166] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance bce01d14-3c1b-4dce-b61c-721e25a56497 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1136.260362] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Migration a6bc0f28-8d6f-4922-895d-929bc5809dd2 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1136.260480] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance a7f01cd7-f148-48fc-a71a-5461672d6039 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1136.550973] env[69992]: DEBUG oslo_concurrency.lockutils [None req-615734dd-881b-4878-aae1-24476c44023f tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lock "f64108ec-c3b2-4b11-9085-2c56b0de93f5" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.742s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1136.608932] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Releasing lock "refresh_cache-dd31269e-716c-44cd-9fc3-ce227fe5b3b2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1136.655036] env[69992]: DEBUG nova.virt.hardware [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='86a3f08e23075089f4a78574aa6a8657',container_format='bare',created_at=2025-03-10T17:48:34Z,direct_url=,disk_format='vmdk',id=f4723384-8c26-48b3-817e-be7849f27178,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1629325456-shelved',owner='3f3a2959667e41f1b5868994454b21be',properties=ImageMetaProps,protected=,size=31671808,status='active',tags=,updated_at=2025-03-10T17:48:51Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1136.655449] env[69992]: DEBUG nova.virt.hardware [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1136.655707] env[69992]: DEBUG nova.virt.hardware [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1136.655994] env[69992]: DEBUG nova.virt.hardware [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1136.656251] env[69992]: DEBUG nova.virt.hardware [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 
tempest-AttachVolumeShelveTestJSON-419349134-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1136.656469] env[69992]: DEBUG nova.virt.hardware [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1136.656789] env[69992]: DEBUG nova.virt.hardware [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1136.657042] env[69992]: DEBUG nova.virt.hardware [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1136.657308] env[69992]: DEBUG nova.virt.hardware [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1136.657626] env[69992]: DEBUG nova.virt.hardware [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1136.657929] env[69992]: DEBUG nova.virt.hardware [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1136.659053] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6785a629-e799-404d-9c94-b19eb33572fa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.671837] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c575c4-a066-477c-13df-34ec0288a7f9, 'name': SearchDatastore_Task, 'duration_secs': 0.015145} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.675213] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1136.675509] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1136.675804] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.675961] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1136.676169] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1136.676536] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be63fdec-1091-496b-b36c-491d3e4d03ac {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.679632] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6064e7e-5640-4f6a-abfd-a395f7abdfe1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.695165] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:cd:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0954fad3-d24d-496c-83e6-a09d3cb556fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd325d681-8643-43a2-93dd-d4687ad115f5', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1136.702822] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 
tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1136.704719] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1136.704719] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1136.704719] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1136.705834] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2388e369-879c-4a73-be43-473b0522668a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.719928] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-949c327b-03dc-49c4-9ded-e72dd47c8aa8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.729901] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1136.729901] env[69992]: value = "task-2897357" [ 1136.729901] env[69992]: _type = "Task" [ 1136.729901] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.730332] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for the task: (returnval){ [ 1136.730332] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]525d4fb7-ab22-369d-84dd-6fa69ecfa011" [ 1136.730332] env[69992]: _type = "Task" [ 1136.730332] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.741101] env[69992]: DEBUG oslo_vmware.api [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897356, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.747327] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897357, 'name': CreateVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.750890] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525d4fb7-ab22-369d-84dd-6fa69ecfa011, 'name': SearchDatastore_Task, 'duration_secs': 0.0153} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.751756] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7aa86ea9-e387-492b-a209-a000e552d929 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.760019] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for the task: (returnval){ [ 1136.760019] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52761d49-e23b-9a34-5cea-dec2b0fa6c94" [ 1136.760019] env[69992]: _type = "Task" [ 1136.760019] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.766210] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 4e93b655-aaf4-49b8-bbb2-92287ec15bbc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1136.767580] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52761d49-e23b-9a34-5cea-dec2b0fa6c94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.049261] env[69992]: DEBUG nova.compute.manager [req-11355740-8279-4c0c-9507-ba34a67684bf req-c94d1e70-6df0-4809-b5f5-cd9ce04f55a2 service nova] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Received event network-changed-4e3de664-c228-44f8-84f3-2c40c903246e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1137.050019] env[69992]: DEBUG nova.compute.manager [req-11355740-8279-4c0c-9507-ba34a67684bf req-c94d1e70-6df0-4809-b5f5-cd9ce04f55a2 service nova] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Refreshing instance network info cache due to event network-changed-4e3de664-c228-44f8-84f3-2c40c903246e. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1137.050597] env[69992]: DEBUG oslo_concurrency.lockutils [req-11355740-8279-4c0c-9507-ba34a67684bf req-c94d1e70-6df0-4809-b5f5-cd9ce04f55a2 service nova] Acquiring lock "refresh_cache-f64108ec-c3b2-4b11-9085-2c56b0de93f5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.051059] env[69992]: DEBUG oslo_concurrency.lockutils [req-11355740-8279-4c0c-9507-ba34a67684bf req-c94d1e70-6df0-4809-b5f5-cd9ce04f55a2 service nova] Acquired lock "refresh_cache-f64108ec-c3b2-4b11-9085-2c56b0de93f5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1137.051638] env[69992]: DEBUG nova.network.neutron [req-11355740-8279-4c0c-9507-ba34a67684bf req-c94d1e70-6df0-4809-b5f5-cd9ce04f55a2 service nova] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Refreshing network info cache for port 4e3de664-c228-44f8-84f3-2c40c903246e {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1137.077365] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d711b263-e14c-42e1-9127-85d3aec0931f tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "f2ac32d7-d32b-497a-a262-ab1cd95f87d0" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.077962] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d711b263-e14c-42e1-9127-85d3aec0931f tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "f2ac32d7-d32b-497a-a262-ab1cd95f87d0" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.080306] env[69992]: INFO nova.compute.manager [None req-d711b263-e14c-42e1-9127-85d3aec0931f tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Attaching volume f9a2424a-1e64-4395-b297-954664b816f3 to /dev/sdb [ 1137.143619] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d45b06-4cb0-4ac6-a472-5e3f356e587d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.153223] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-433510cc-d5e1-4f69-abfa-3fc7c0f3847b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.170451] env[69992]: DEBUG nova.virt.block_device [None req-d711b263-e14c-42e1-9127-85d3aec0931f tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Updating existing volume attachment record: 80b9d216-012a-4386-b7e4-760a21cd1617 {{(pid=69992) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1137.239940] env[69992]: DEBUG oslo_vmware.api [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897356, 'name': ReconfigVM_Task, 'duration_secs': 0.66969} completed 
successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.240899] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Reconfigured VM instance instance-00000024 to attach disk [datastore2] a7f01cd7-f148-48fc-a71a-5461672d6039/a7f01cd7-f148-48fc-a71a-5461672d6039.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1137.241019] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Updating instance 'a7f01cd7-f148-48fc-a71a-5461672d6039' progress to 50 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1137.249135] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897357, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.273302] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance c1c90aa6-922d-4315-8ead-2263a55a5d6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1137.275571] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52761d49-e23b-9a34-5cea-dec2b0fa6c94, 'name': SearchDatastore_Task, 'duration_secs': 0.021094} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.276421] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1137.276868] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3/4609d6ce-9d5b-408d-8cb6-1baf76d85bb3.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1137.277381] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-060b8586-e3fa-44ea-808f-1fb82cfdcce3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.287047] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for the task: (returnval){ [ 1137.287047] env[69992]: value = "task-2897358" [ 1137.287047] env[69992]: _type = "Task" [ 1137.287047] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.297798] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897358, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.457522] env[69992]: DEBUG nova.compute.manager [req-b26490f3-b4b6-4d66-9b62-26efd545799a req-9a547ad2-efa7-41cd-b1a3-aab1fa7c528f service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Received event network-changed-d325d681-8643-43a2-93dd-d4687ad115f5 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1137.457733] env[69992]: DEBUG nova.compute.manager [req-b26490f3-b4b6-4d66-9b62-26efd545799a req-9a547ad2-efa7-41cd-b1a3-aab1fa7c528f service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Refreshing instance network info cache due to event network-changed-d325d681-8643-43a2-93dd-d4687ad115f5. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1137.458070] env[69992]: DEBUG oslo_concurrency.lockutils [req-b26490f3-b4b6-4d66-9b62-26efd545799a req-9a547ad2-efa7-41cd-b1a3-aab1fa7c528f service nova] Acquiring lock "refresh_cache-dd31269e-716c-44cd-9fc3-ce227fe5b3b2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.458126] env[69992]: DEBUG oslo_concurrency.lockutils [req-b26490f3-b4b6-4d66-9b62-26efd545799a req-9a547ad2-efa7-41cd-b1a3-aab1fa7c528f service nova] Acquired lock "refresh_cache-dd31269e-716c-44cd-9fc3-ce227fe5b3b2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1137.458285] env[69992]: DEBUG nova.network.neutron [req-b26490f3-b4b6-4d66-9b62-26efd545799a req-9a547ad2-efa7-41cd-b1a3-aab1fa7c528f service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Refreshing network info cache for port d325d681-8643-43a2-93dd-d4687ad115f5 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1137.743377] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897357, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.753204] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2956f69e-d8a1-409e-9823-8971f93ee47f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.778017] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 37751af7-267e-4693-aaa3-cd1bb9c3d950 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1137.783767] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f16a8b-552d-45e5-8fe0-c5eba0cf1628 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.814046] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquiring lock "f64108ec-c3b2-4b11-9085-2c56b0de93f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.814046] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lock "f64108ec-c3b2-4b11-9085-2c56b0de93f5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.814046] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquiring lock "f64108ec-c3b2-4b11-9085-2c56b0de93f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.814046] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lock "f64108ec-c3b2-4b11-9085-2c56b0de93f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.814046] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lock "f64108ec-c3b2-4b11-9085-2c56b0de93f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.815129] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Updating instance 'a7f01cd7-f148-48fc-a71a-5461672d6039' progress to 67 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1137.827034] env[69992]: INFO nova.compute.manager [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Terminating instance [ 1137.828933] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 
tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897358, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.888025] env[69992]: DEBUG nova.network.neutron [req-11355740-8279-4c0c-9507-ba34a67684bf req-c94d1e70-6df0-4809-b5f5-cd9ce04f55a2 service nova] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Updated VIF entry in instance network info cache for port 4e3de664-c228-44f8-84f3-2c40c903246e. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1137.888407] env[69992]: DEBUG nova.network.neutron [req-11355740-8279-4c0c-9507-ba34a67684bf req-c94d1e70-6df0-4809-b5f5-cd9ce04f55a2 service nova] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Updating instance_info_cache with network_info: [{"id": "4e3de664-c228-44f8-84f3-2c40c903246e", "address": "fa:16:3e:b0:84:44", "network": {"id": "daf09f8e-2217-4777-9a09-57c09080946c", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-520713710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bb19b95496548c084be8a8c87b8cd94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e3de664-c2", "ovs_interfaceid": "4e3de664-c228-44f8-84f3-2c40c903246e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.202655] env[69992]: DEBUG nova.network.neutron [req-b26490f3-b4b6-4d66-9b62-26efd545799a req-9a547ad2-efa7-41cd-b1a3-aab1fa7c528f service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Updated VIF entry in instance network info cache for port d325d681-8643-43a2-93dd-d4687ad115f5. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1138.202655] env[69992]: DEBUG nova.network.neutron [req-b26490f3-b4b6-4d66-9b62-26efd545799a req-9a547ad2-efa7-41cd-b1a3-aab1fa7c528f service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Updating instance_info_cache with network_info: [{"id": "d325d681-8643-43a2-93dd-d4687ad115f5", "address": "fa:16:3e:19:cd:26", "network": {"id": "50514bed-dff8-45a5-83f3-ec0c1ece9611", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1240365716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f3a2959667e41f1b5868994454b21be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd325d681-86", "ovs_interfaceid": "d325d681-8643-43a2-93dd-d4687ad115f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.243961] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897357, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.287909] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 5f98a2aa-eb7b-41d2-9e9f-14cee9445942 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1138.288175] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Migration a5252a08-401e-4a46-9c0d-2521390462d4 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1138.288260] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 9df7b187-e579-41b0-9d24-be2a1ae93079 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.299433] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897358, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.584756} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.299731] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3/4609d6ce-9d5b-408d-8cb6-1baf76d85bb3.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1138.299945] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1138.300225] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2d780eea-3dc4-4cbc-b23d-c46221aef455 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.307085] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for the task: (returnval){ [ 1138.307085] env[69992]: value = "task-2897362" [ 1138.307085] env[69992]: _type = "Task" [ 1138.307085] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.316435] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897362, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.332493] env[69992]: DEBUG nova.compute.manager [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1138.332743] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1138.333688] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833be61a-b9b1-49b3-990d-3669dba596d5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.342605] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1138.342863] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2feb27ab-75ac-42d6-a0ee-1f3a8c78c000 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.349644] env[69992]: DEBUG oslo_vmware.api [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for the task: (returnval){ [ 1138.349644] env[69992]: value = "task-2897363" [ 1138.349644] env[69992]: _type = "Task" [ 1138.349644] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.358166] env[69992]: DEBUG oslo_vmware.api [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897363, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.369111] env[69992]: DEBUG nova.network.neutron [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Port b39fa912-b02a-4764-8cc8-f79e08d575c6 binding to destination host cpu-1 is already ACTIVE {{(pid=69992) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1138.391820] env[69992]: DEBUG oslo_concurrency.lockutils [req-11355740-8279-4c0c-9507-ba34a67684bf req-c94d1e70-6df0-4809-b5f5-cd9ce04f55a2 service nova] Releasing lock "refresh_cache-f64108ec-c3b2-4b11-9085-2c56b0de93f5" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1138.705055] env[69992]: DEBUG oslo_concurrency.lockutils [req-b26490f3-b4b6-4d66-9b62-26efd545799a req-9a547ad2-efa7-41cd-b1a3-aab1fa7c528f service nova] Releasing lock "refresh_cache-dd31269e-716c-44cd-9fc3-ce227fe5b3b2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1138.745656] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897357, 'name': CreateVM_Task, 'duration_secs': 1.562044} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.745954] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1138.746485] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f4723384-8c26-48b3-817e-be7849f27178" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1138.746656] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f4723384-8c26-48b3-817e-be7849f27178" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1138.747045] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f4723384-8c26-48b3-817e-be7849f27178" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1138.747314] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f55c3fd-4ccf-4c0f-a4df-ca35c9ac355b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.752056] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1138.752056] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52bcdef0-b303-3fb7-1b5c-7f73a6fb7d55" [ 1138.752056] env[69992]: _type = "Task" [ 1138.752056] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.760247] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52bcdef0-b303-3fb7-1b5c-7f73a6fb7d55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.795036] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1138.795490] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Total usable vcpus: 48, total allocated vcpus: 26 {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1138.795653] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=5696MB phys_disk=200GB used_disk=25GB total_vcpus=48 used_vcpus=26 pci_stats=[] {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1138.816993] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897362, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096586} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.817361] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1138.818116] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4975fe3-83a8-4229-9f10-b37bc714418f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.839834] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3/4609d6ce-9d5b-408d-8cb6-1baf76d85bb3.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1138.840179] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02784ac8-fcb6-4b1b-9c64-13f8715736fd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.865456] env[69992]: DEBUG oslo_vmware.api [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897363, 'name': PowerOffVM_Task, 'duration_secs': 0.184066} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.866688] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1138.866868] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1138.867284] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for the task: (returnval){ [ 1138.867284] env[69992]: value = "task-2897364" [ 1138.867284] env[69992]: _type = "Task" [ 1138.867284] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.869756] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4911f9c8-511c-43fb-80ce-caf4a9086c73 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.887300] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897364, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.946029] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1138.946230] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1138.946483] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Deleting the datastore file [datastore1] f64108ec-c3b2-4b11-9085-2c56b0de93f5 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1138.946824] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-63e16ff2-4b56-41fa-9185-7cdcb605a462 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.955640] env[69992]: DEBUG oslo_vmware.api [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for the task: (returnval){ [ 1138.955640] env[69992]: value = "task-2897366" [ 1138.955640] env[69992]: _type = "Task" [ 1138.955640] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.968606] env[69992]: DEBUG oslo_vmware.api [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897366, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.263240] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f4723384-8c26-48b3-817e-be7849f27178" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1139.263501] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Processing image f4723384-8c26-48b3-817e-be7849f27178 {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1139.263741] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f4723384-8c26-48b3-817e-be7849f27178/f4723384-8c26-48b3-817e-be7849f27178.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.263889] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f4723384-8c26-48b3-817e-be7849f27178/f4723384-8c26-48b3-817e-be7849f27178.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1139.264083] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1139.264342] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4435b5fb-02fb-4f98-8581-738451288b0a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.275695] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1139.275887] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1139.276638] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46c79373-568d-4256-b027-d8f2b58b4523 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.284420] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1139.284420] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5237bed7-5524-2888-3722-0f59676ceda3" [ 1139.284420] env[69992]: _type = "Task" [ 1139.284420] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.292145] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5237bed7-5524-2888-3722-0f59676ceda3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.327926] env[69992]: DEBUG oslo_vmware.rw_handles [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5289ca6b-71d7-b034-2bc0-5b0fdfd0d7f8/disk-0.vmdk. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1139.329038] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede91962-b0cb-4f8f-9a8a-936cd53a77b5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.338461] env[69992]: DEBUG oslo_vmware.rw_handles [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5289ca6b-71d7-b034-2bc0-5b0fdfd0d7f8/disk-0.vmdk is in state: ready. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1139.338688] env[69992]: ERROR oslo_vmware.rw_handles [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5289ca6b-71d7-b034-2bc0-5b0fdfd0d7f8/disk-0.vmdk due to incomplete transfer. 
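The recurring "Waiting for the task: (returnval){ ... }" / "progress is N%" / "completed successfully" records throughout this stretch of the log come from oslo.vmware's task polling (wait_for_task / _poll_task), which repeatedly reads the vCenter task state until it reaches a terminal value. The following is a minimal, self-contained Python sketch of that polling pattern only; fake_task_info and the dict layout are illustrative stand-ins for the real PropertyCollector lookups, not Nova's or oslo.vmware's actual helpers.

import time

def fake_task_info():
    # Simulated task states: a few progress updates, then success.
    # (Stand-in for querying the vCenter task's "info" property.)
    for progress in (0, 14, 51, 99):
        yield {"state": "running", "progress": progress}
    yield {"state": "success", "progress": 100}

def wait_for_task(task_info_source, poll_interval=0.5):
    # Poll until the task reports a terminal state, logging progress
    # in the same spirit as the "progress is N%" lines above.
    for info in task_info_source:
        if info["state"] == "running":
            print("progress is %d%%" % info["progress"])
            time.sleep(poll_interval)
        elif info["state"] == "success":
            print("completed successfully")
            return info
        else:
            raise RuntimeError("task failed: %r" % info)
    raise RuntimeError("task info source exhausted without a terminal state")

if __name__ == "__main__":
    wait_for_task(fake_task_info(), poll_interval=0.01)

Running the sketch prints a few progress lines and then "completed successfully", mirroring the poll cadence visible in the task-2897357 (CreateVM_Task) and task-2897358 (CopyVirtualDisk_Task) entries above.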
[ 1139.338934] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b3e039b0-5347-4cc0-bb34-fddba783f690 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.345168] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb786361-4096-4785-8beb-0043fb8a7cc9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.348264] env[69992]: DEBUG oslo_vmware.rw_handles [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5289ca6b-71d7-b034-2bc0-5b0fdfd0d7f8/disk-0.vmdk. {{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1139.348453] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Uploaded image 10b701c1-9a32-4c7e-a195-4676726c8b8e to the Glance image server {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1139.350569] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Destroying the VM {{(pid=69992) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1139.351201] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d01d67a3-500e-4632-861a-62909dbd31d4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.355412] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b24e5fe-44ea-4089-aa5c-93056c304c87 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.359523] env[69992]: DEBUG oslo_vmware.api [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1139.359523] env[69992]: value = "task-2897367" [ 1139.359523] env[69992]: _type = "Task" [ 1139.359523] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.400178] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad857791-b45a-4cda-9bae-b1433b5ef8eb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.410192] env[69992]: DEBUG oslo_vmware.api [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897367, 'name': Destroy_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.412564] env[69992]: DEBUG oslo_concurrency.lockutils [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "a7f01cd7-f148-48fc-a71a-5461672d6039-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1139.412782] env[69992]: DEBUG oslo_concurrency.lockutils [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "a7f01cd7-f148-48fc-a71a-5461672d6039-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1139.412949] env[69992]: DEBUG oslo_concurrency.lockutils [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "a7f01cd7-f148-48fc-a71a-5461672d6039-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1139.419122] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897364, 'name': ReconfigVM_Task, 'duration_secs': 0.3816} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.421295] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3/4609d6ce-9d5b-408d-8cb6-1baf76d85bb3.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1139.422525] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-471938a9-fea8-49c0-9c31-abe30226e5c7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.424559] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e071a842-c714-4d8d-9efa-04d3d7dc11ef {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.440731] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1139.443058] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for the task: (returnval){ [ 1139.443058] env[69992]: value = "task-2897368" [ 1139.443058] env[69992]: _type = "Task" [ 1139.443058] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.453368] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897368, 'name': Rename_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.470631] env[69992]: DEBUG oslo_vmware.api [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897366, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145017} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.470631] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1139.470631] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1139.470631] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1139.470631] env[69992]: INFO nova.compute.manager [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1139.470631] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1139.470631] env[69992]: DEBUG nova.compute.manager [-] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1139.470631] env[69992]: DEBUG nova.network.neutron [-] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1139.797021] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Preparing fetch location {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1139.797021] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Fetch image to [datastore2] OSTACK_IMG_d30c88c5-c09b-4219-8449-e0113b65fe20/OSTACK_IMG_d30c88c5-c09b-4219-8449-e0113b65fe20.vmdk {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1139.797021] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Downloading stream optimized image f4723384-8c26-48b3-817e-be7849f27178 to [datastore2] OSTACK_IMG_d30c88c5-c09b-4219-8449-e0113b65fe20/OSTACK_IMG_d30c88c5-c09b-4219-8449-e0113b65fe20.vmdk on the data store datastore2 as vApp {{(pid=69992) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1139.797021] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Downloading image file data f4723384-8c26-48b3-817e-be7849f27178 to the ESX as VM named 'OSTACK_IMG_d30c88c5-c09b-4219-8449-e0113b65fe20' {{(pid=69992) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1139.871221] env[69992]: DEBUG oslo_vmware.api [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897367, 'name': Destroy_Task, 'duration_secs': 0.348105} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.871810] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Destroyed the VM [ 1139.872387] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Deleting Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1139.875093] env[69992]: DEBUG oslo_vmware.rw_handles [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1139.875093] env[69992]: value = "resgroup-9" [ 1139.875093] env[69992]: _type = "ResourcePool" [ 1139.875093] env[69992]: }. {{(pid=69992) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1139.875530] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6f842bb3-0d6f-414a-a631-9ec97785dda2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.877354] env[69992]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-191fa244-ea8d-45a6-8ee1-ed74ff30178e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.900207] env[69992]: DEBUG oslo_vmware.api [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1139.900207] env[69992]: value = "task-2897370" [ 1139.900207] env[69992]: _type = "Task" [ 1139.900207] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.900621] env[69992]: DEBUG oslo_vmware.rw_handles [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lease: (returnval){ [ 1139.900621] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]527dbaf7-5f89-ed29-9bda-772c869d544c" [ 1139.900621] env[69992]: _type = "HttpNfcLease" [ 1139.900621] env[69992]: } obtained for vApp import into resource pool (val){ [ 1139.900621] env[69992]: value = "resgroup-9" [ 1139.900621] env[69992]: _type = "ResourcePool" [ 1139.900621] env[69992]: }. {{(pid=69992) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1139.901185] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the lease: (returnval){ [ 1139.901185] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]527dbaf7-5f89-ed29-9bda-772c869d544c" [ 1139.901185] env[69992]: _type = "HttpNfcLease" [ 1139.901185] env[69992]: } to be ready. 
{{(pid=69992) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1139.911888] env[69992]: DEBUG oslo_vmware.api [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897370, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.913584] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1139.913584] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]527dbaf7-5f89-ed29-9bda-772c869d544c" [ 1139.913584] env[69992]: _type = "HttpNfcLease" [ 1139.913584] env[69992]: } is initializing. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1139.956144] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897368, 'name': Rename_Task, 'duration_secs': 0.161667} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.956685] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1139.957055] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c424f641-dc9d-407e-941d-d846598ed314 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.964257] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for the task: (returnval){ [ 1139.964257] env[69992]: value = "task-2897372" [ 1139.964257] env[69992]: _type = "Task" [ 1139.964257] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.967015] env[69992]: ERROR nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [req-8fb85bcf-3286-44d2-ad35-6d2ed2200df8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8fb85bcf-3286-44d2-ad35-6d2ed2200df8"}]} [ 1139.978095] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897372, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.983217] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Refreshing inventories for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1140.002462] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Updating ProviderTree inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1140.002679] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1140.008377] env[69992]: DEBUG nova.compute.manager [req-86675696-c32c-4ad4-af4b-77ff7fe3319f req-16d1df37-d3e0-498e-9293-a369830e14ad service nova] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Received event network-vif-deleted-4e3de664-c228-44f8-84f3-2c40c903246e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1140.008686] env[69992]: INFO nova.compute.manager [req-86675696-c32c-4ad4-af4b-77ff7fe3319f req-16d1df37-d3e0-498e-9293-a369830e14ad service nova] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Neutron deleted interface 4e3de664-c228-44f8-84f3-2c40c903246e; detaching it from the instance and deleting it from the info cache [ 1140.008773] env[69992]: DEBUG nova.network.neutron [req-86675696-c32c-4ad4-af4b-77ff7fe3319f req-16d1df37-d3e0-498e-9293-a369830e14ad service nova] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.017626] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Refreshing aggregate associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, aggregates: None {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1140.037183] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Refreshing trait associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1140.220283] 
env[69992]: DEBUG nova.network.neutron [-] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.413615] env[69992]: DEBUG oslo_vmware.api [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897370, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.417470] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1140.417470] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]527dbaf7-5f89-ed29-9bda-772c869d544c" [ 1140.417470] env[69992]: _type = "HttpNfcLease" [ 1140.417470] env[69992]: } is initializing. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1140.477679] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897372, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.487296] env[69992]: DEBUG oslo_concurrency.lockutils [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "refresh_cache-a7f01cd7-f148-48fc-a71a-5461672d6039" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.487501] env[69992]: DEBUG oslo_concurrency.lockutils [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired lock "refresh_cache-a7f01cd7-f148-48fc-a71a-5461672d6039" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1140.487692] env[69992]: DEBUG nova.network.neutron [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1140.512863] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9387234d-4208-416c-84cf-03d6e7e4d8ef {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.524067] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0cafcaf-bc4a-4ccc-a040-2de162b8fb9d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.574432] env[69992]: DEBUG nova.compute.manager [req-86675696-c32c-4ad4-af4b-77ff7fe3319f req-16d1df37-d3e0-498e-9293-a369830e14ad service nova] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Detach interface failed, port_id=4e3de664-c228-44f8-84f3-2c40c903246e, reason: Instance f64108ec-c3b2-4b11-9085-2c56b0de93f5 could not be found. 
{{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1140.576289] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fedc494f-9cf1-4590-ac1e-b1c5aa6a45b7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.583193] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-449986d4-f1f7-4f03-b463-16647099299e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.613009] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a02d011-157b-46a7-a54c-cba21d16ede9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.620442] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b437258-4c66-4c74-9755-c220757bfd8e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.633730] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1140.693780] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "408de352-797c-40c2-86bc-359e01c5c04e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1140.693972] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "408de352-797c-40c2-86bc-359e01c5c04e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1140.722405] env[69992]: INFO nova.compute.manager [-] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Took 1.25 seconds to deallocate network for instance. [ 1140.915019] env[69992]: DEBUG oslo_vmware.api [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897370, 'name': RemoveSnapshot_Task, 'duration_secs': 0.838053} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.915952] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Deleted Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1140.916222] env[69992]: INFO nova.compute.manager [None req-dbfaf2c1-1631-4401-bd1f-9a95a014578c tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Took 16.00 seconds to snapshot the instance on the hypervisor. [ 1140.918704] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1140.918704] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]527dbaf7-5f89-ed29-9bda-772c869d544c" [ 1140.918704] env[69992]: _type = "HttpNfcLease" [ 1140.918704] env[69992]: } is ready. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1140.918891] env[69992]: DEBUG oslo_vmware.rw_handles [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1140.918891] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]527dbaf7-5f89-ed29-9bda-772c869d544c" [ 1140.918891] env[69992]: _type = "HttpNfcLease" [ 1140.918891] env[69992]: }. {{(pid=69992) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1140.919721] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e642e81-3fd3-495a-8124-2bd28f8f671f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.927247] env[69992]: DEBUG oslo_vmware.rw_handles [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5291e4bb-40f6-2d51-ced2-18d759c95482/disk-0.vmdk from lease info. {{(pid=69992) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1140.927429] env[69992]: DEBUG oslo_vmware.rw_handles [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Creating HTTP connection to write to file with size = 31671808 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5291e4bb-40f6-2d51-ced2-18d759c95482/disk-0.vmdk. {{(pid=69992) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1141.002025] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b714a0cf-ae38-4ac0-a497-070e68333e4a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.005695] env[69992]: DEBUG oslo_vmware.api [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897372, 'name': PowerOnVM_Task, 'duration_secs': 0.589695} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.006331] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1141.006570] env[69992]: DEBUG nova.compute.manager [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1141.010021] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f10f11-27e9-4393-a0ec-4a66a7deef7f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.155940] env[69992]: ERROR nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [req-7d6054eb-683f-4421-807b-c3fb8f27501f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7d6054eb-683f-4421-807b-c3fb8f27501f"}]} [ 1141.171592] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Refreshing inventories for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1141.185242] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Updating ProviderTree inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1141.185433] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1141.196515] env[69992]: DEBUG nova.compute.manager [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1141.199588] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Refreshing aggregate associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, aggregates: None {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1141.218398] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Refreshing trait associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1141.229931] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.233062] env[69992]: DEBUG nova.network.neutron [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Updating instance_info_cache with network_info: [{"id": "b39fa912-b02a-4764-8cc8-f79e08d575c6", "address": "fa:16:3e:02:93:e2", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb39fa912-b0", "ovs_interfaceid": "b39fa912-b02a-4764-8cc8-f79e08d575c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.532351] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69992) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.718631] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d711b263-e14c-42e1-9127-85d3aec0931f tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Volume attach. Driver type: vmdk {{(pid=69992) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1141.718903] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d711b263-e14c-42e1-9127-85d3aec0931f tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582015', 'volume_id': 'f9a2424a-1e64-4395-b297-954664b816f3', 'name': 'volume-f9a2424a-1e64-4395-b297-954664b816f3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f2ac32d7-d32b-497a-a262-ab1cd95f87d0', 'attached_at': '', 'detached_at': '', 'volume_id': 'f9a2424a-1e64-4395-b297-954664b816f3', 'serial': 'f9a2424a-1e64-4395-b297-954664b816f3'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1141.719898] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4678ecf-f03e-42e0-ab01-bdc359059b2f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.725659] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.739538] env[69992]: DEBUG oslo_concurrency.lockutils [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Releasing lock "refresh_cache-a7f01cd7-f148-48fc-a71a-5461672d6039" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.744689] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae00527d-d7b0-4bcd-88e7-161b45c13125 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.747658] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50af6110-4803-412d-8207-df6e3f3268f3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.767293] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-280b16aa-0693-4b71-b0b4-db17294ced09 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.777585] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d711b263-e14c-42e1-9127-85d3aec0931f tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] 
volume-f9a2424a-1e64-4395-b297-954664b816f3/volume-f9a2424a-1e64-4395-b297-954664b816f3.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1141.777856] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3761dd62-893f-4755-beeb-a2e4ab223c6a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.827606] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-890add45-cda0-4b3b-a16e-66514935d37a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.830609] env[69992]: DEBUG oslo_vmware.api [None req-d711b263-e14c-42e1-9127-85d3aec0931f tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1141.830609] env[69992]: value = "task-2897373" [ 1141.830609] env[69992]: _type = "Task" [ 1141.830609] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.840049] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d5f510-235a-4205-a265-266a1fb4b432 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.850247] env[69992]: DEBUG oslo_vmware.api [None req-d711b263-e14c-42e1-9127-85d3aec0931f tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897373, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.860021] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1142.163637] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Acquiring lock "4609d6ce-9d5b-408d-8cb6-1baf76d85bb3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.163973] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Lock "4609d6ce-9d5b-408d-8cb6-1baf76d85bb3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.164339] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 
tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Acquiring lock "4609d6ce-9d5b-408d-8cb6-1baf76d85bb3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.164607] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Lock "4609d6ce-9d5b-408d-8cb6-1baf76d85bb3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.164872] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Lock "4609d6ce-9d5b-408d-8cb6-1baf76d85bb3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.169443] env[69992]: INFO nova.compute.manager [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Terminating instance [ 1142.277459] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059c7b02-babf-47ba-bd19-c06a20d64ff0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.297507] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ffb458-9d24-4e68-8006-1c676f4087f7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.304697] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Updating instance 'a7f01cd7-f148-48fc-a71a-5461672d6039' progress to 83 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1142.345283] env[69992]: DEBUG oslo_vmware.api [None req-d711b263-e14c-42e1-9127-85d3aec0931f tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897373, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.398416] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 105 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1142.398665] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 105 to 106 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1142.398818] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1142.675390] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Acquiring lock "refresh_cache-4609d6ce-9d5b-408d-8cb6-1baf76d85bb3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.675645] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Acquired lock "refresh_cache-4609d6ce-9d5b-408d-8cb6-1baf76d85bb3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.675850] env[69992]: DEBUG nova.network.neutron [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1142.682725] env[69992]: DEBUG oslo_vmware.rw_handles [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Completed reading data from the image iterator. 
{{(pid=69992) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1142.682925] env[69992]: DEBUG oslo_vmware.rw_handles [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5291e4bb-40f6-2d51-ced2-18d759c95482/disk-0.vmdk. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1142.683818] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fb3243c-49eb-4b9d-ac64-8b0f0633cd25 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.690825] env[69992]: DEBUG oslo_vmware.rw_handles [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5291e4bb-40f6-2d51-ced2-18d759c95482/disk-0.vmdk is in state: ready. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1142.690997] env[69992]: DEBUG oslo_vmware.rw_handles [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5291e4bb-40f6-2d51-ced2-18d759c95482/disk-0.vmdk. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1142.691257] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-3e28e251-f58b-4e1c-b6da-36a07c7e47cc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.810627] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-901d9138-2f41-457c-831f-6b97c4c02063 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Updating instance 'a7f01cd7-f148-48fc-a71a-5461672d6039' progress to 100 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1142.841603] env[69992]: DEBUG oslo_vmware.api [None req-d711b263-e14c-42e1-9127-85d3aec0931f tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897373, 'name': ReconfigVM_Task, 'duration_secs': 0.597218} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.841881] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d711b263-e14c-42e1-9127-85d3aec0931f tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Reconfigured VM instance instance-00000040 to attach disk [datastore2] volume-f9a2424a-1e64-4395-b297-954664b816f3/volume-f9a2424a-1e64-4395-b297-954664b816f3.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1142.846646] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa1007c8-ca87-4bb0-9a4d-bfdb70355da8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.862544] env[69992]: DEBUG oslo_vmware.api [None req-d711b263-e14c-42e1-9127-85d3aec0931f tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1142.862544] env[69992]: value = "task-2897374" [ 1142.862544] env[69992]: _type = "Task" [ 1142.862544] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.872575] env[69992]: DEBUG oslo_vmware.api [None req-d711b263-e14c-42e1-9127-85d3aec0931f tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897374, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.906570] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69992) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1142.906841] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.209s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.907142] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 50.046s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.908771] env[69992]: INFO nova.compute.claims [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1142.911556] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1142.911709] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Cleaning up deleted instances 
{{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1143.031162] env[69992]: DEBUG oslo_vmware.rw_handles [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5291e4bb-40f6-2d51-ced2-18d759c95482/disk-0.vmdk. {{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1143.031444] env[69992]: INFO nova.virt.vmwareapi.images [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Downloaded image file data f4723384-8c26-48b3-817e-be7849f27178 [ 1143.032795] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16cb60f7-6532-4569-88d1-7366fad8c789 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.051726] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-988a7a49-bd7c-4ec0-b391-8150497ccb27 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.077459] env[69992]: INFO nova.virt.vmwareapi.images [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] The imported VM was unregistered [ 1143.079954] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Caching image {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1143.080273] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Creating directory with path [datastore2] devstack-image-cache_base/f4723384-8c26-48b3-817e-be7849f27178 {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1143.080555] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5dc87db2-0871-49f0-81dd-cf3470c8c6d1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.103251] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Created directory with path [datastore2] devstack-image-cache_base/f4723384-8c26-48b3-817e-be7849f27178 {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1143.103486] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_d30c88c5-c09b-4219-8449-e0113b65fe20/OSTACK_IMG_d30c88c5-c09b-4219-8449-e0113b65fe20.vmdk to [datastore2] 
devstack-image-cache_base/f4723384-8c26-48b3-817e-be7849f27178/f4723384-8c26-48b3-817e-be7849f27178.vmdk. {{(pid=69992) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1143.103739] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-c5e92c3e-56f7-4324-b088-d5a0d25dba35 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.110617] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1143.110617] env[69992]: value = "task-2897376" [ 1143.110617] env[69992]: _type = "Task" [ 1143.110617] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.118589] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897376, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.195641] env[69992]: DEBUG nova.network.neutron [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1143.244746] env[69992]: DEBUG nova.network.neutron [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.373801] env[69992]: DEBUG oslo_vmware.api [None req-d711b263-e14c-42e1-9127-85d3aec0931f tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897374, 'name': ReconfigVM_Task, 'duration_secs': 0.171318} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.374337] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d711b263-e14c-42e1-9127-85d3aec0931f tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582015', 'volume_id': 'f9a2424a-1e64-4395-b297-954664b816f3', 'name': 'volume-f9a2424a-1e64-4395-b297-954664b816f3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f2ac32d7-d32b-497a-a262-ab1cd95f87d0', 'attached_at': '', 'detached_at': '', 'volume_id': 'f9a2424a-1e64-4395-b297-954664b816f3', 'serial': 'f9a2424a-1e64-4395-b297-954664b816f3'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1143.434864] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] There are 36 instances to clean {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1143.435364] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: efa06ccc-be20-4d0e-938f-01c91ef4de8e] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1143.622364] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897376, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.747598] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Releasing lock "refresh_cache-4609d6ce-9d5b-408d-8cb6-1baf76d85bb3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.748163] env[69992]: DEBUG nova.compute.manager [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1143.748376] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1143.749262] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-245f6aef-9ac6-454a-839b-06a34f2ff400 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.758705] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1143.758987] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3fd5efd0-cc9a-4512-b3d3-01028a2810f1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.768605] env[69992]: DEBUG oslo_vmware.api [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for the task: (returnval){ [ 1143.768605] env[69992]: value = "task-2897377" [ 1143.768605] env[69992]: _type = "Task" [ 1143.768605] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.779454] env[69992]: DEBUG oslo_vmware.api [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897377, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.945597] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 06442c68-7dc6-46a1-9e35-34a62730a555] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1143.995793] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af1c7eb-e8f9-421a-9fcd-13246fcb00ec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.004521] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9df7aca-9c8e-4407-819b-81c5d3cde6bf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.040593] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ee6a850-2db9-4c00-9f97-996fe5331966 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.052048] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab2628b0-6b21-4a7c-99f5-4aa375b0bc29 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.070259] env[69992]: DEBUG nova.compute.provider_tree [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1144.123742] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897376, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.282192] env[69992]: DEBUG oslo_vmware.api [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897377, 'name': PowerOffVM_Task, 'duration_secs': 0.235454} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.282609] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1144.282735] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1144.283020] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-886515eb-3a80-41a8-8180-1a37cad0362c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.311477] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1144.311827] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1144.312062] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Deleting the datastore file [datastore2] 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1144.312347] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-53af28f2-7d42-4bc6-939f-6ea42a02c3d9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.321239] env[69992]: DEBUG oslo_vmware.api [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for the task: (returnval){ [ 1144.321239] env[69992]: value = "task-2897379" [ 1144.321239] env[69992]: _type = "Task" [ 1144.321239] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.342955] env[69992]: DEBUG oslo_vmware.api [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897379, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.428414] env[69992]: DEBUG nova.objects.instance [None req-d711b263-e14c-42e1-9127-85d3aec0931f tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lazy-loading 'flavor' on Instance uuid f2ac32d7-d32b-497a-a262-ab1cd95f87d0 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1144.453159] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: b3d62400-e639-4c49-9207-64fd1e684f99] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1144.577400] env[69992]: DEBUG nova.scheduler.client.report [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1144.581269] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5abb01a-e8e9-493d-b6be-0cdc8b1d67c2 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "a7f01cd7-f148-48fc-a71a-5461672d6039" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1144.581499] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5abb01a-e8e9-493d-b6be-0cdc8b1d67c2 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "a7f01cd7-f148-48fc-a71a-5461672d6039" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.581682] env[69992]: DEBUG nova.compute.manager [None req-c5abb01a-e8e9-493d-b6be-0cdc8b1d67c2 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Going to confirm migration 3 {{(pid=69992) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1144.624122] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897376, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.834495] env[69992]: DEBUG oslo_vmware.api [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897379, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.934511] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d711b263-e14c-42e1-9127-85d3aec0931f tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "f2ac32d7-d32b-497a-a262-ab1cd95f87d0" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.857s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1144.956746] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 714fafbf-a765-4e2c-8633-997d8244483c] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1145.084974] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.177s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.085303] env[69992]: DEBUG nova.compute.manager [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1145.091277] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 52.077s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.091430] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.093942] env[69992]: DEBUG oslo_concurrency.lockutils [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.432s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.095406] env[69992]: INFO nova.compute.claims [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1145.130392] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897376, 'name': MoveVirtualDisk_Task} progress is 83%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.132808] env[69992]: INFO nova.scheduler.client.report [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Deleted allocations for instance 673be00f-e3c5-4a54-beeb-cf89828e9e32 [ 1145.184827] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5abb01a-e8e9-493d-b6be-0cdc8b1d67c2 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "refresh_cache-a7f01cd7-f148-48fc-a71a-5461672d6039" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.184827] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5abb01a-e8e9-493d-b6be-0cdc8b1d67c2 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired lock "refresh_cache-a7f01cd7-f148-48fc-a71a-5461672d6039" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1145.184827] env[69992]: DEBUG nova.network.neutron [None req-c5abb01a-e8e9-493d-b6be-0cdc8b1d67c2 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1145.184827] env[69992]: DEBUG nova.objects.instance [None req-c5abb01a-e8e9-493d-b6be-0cdc8b1d67c2 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lazy-loading 'info_cache' on Instance uuid a7f01cd7-f148-48fc-a71a-5461672d6039 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1145.336534] env[69992]: DEBUG oslo_vmware.api [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897379, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.460793] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 2b1a0943-d59a-441d-a2e6-8149106803b6] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1145.538793] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "f2ac32d7-d32b-497a-a262-ab1cd95f87d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.539080] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "f2ac32d7-d32b-497a-a262-ab1cd95f87d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.539406] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "f2ac32d7-d32b-497a-a262-ab1cd95f87d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.539576] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "f2ac32d7-d32b-497a-a262-ab1cd95f87d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.539755] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "f2ac32d7-d32b-497a-a262-ab1cd95f87d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.542327] env[69992]: INFO nova.compute.manager [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Terminating instance [ 1145.592201] env[69992]: DEBUG nova.compute.utils [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1145.593875] env[69992]: DEBUG nova.compute.manager [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1145.594014] env[69992]: DEBUG nova.network.neutron [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1145.625280] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897376, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.418866} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.625604] env[69992]: INFO nova.virt.vmwareapi.ds_util [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_d30c88c5-c09b-4219-8449-e0113b65fe20/OSTACK_IMG_d30c88c5-c09b-4219-8449-e0113b65fe20.vmdk to [datastore2] devstack-image-cache_base/f4723384-8c26-48b3-817e-be7849f27178/f4723384-8c26-48b3-817e-be7849f27178.vmdk. [ 1145.625772] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Cleaning up location [datastore2] OSTACK_IMG_d30c88c5-c09b-4219-8449-e0113b65fe20 {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1145.625941] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_d30c88c5-c09b-4219-8449-e0113b65fe20 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1145.626451] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a55162b2-7c8c-48ba-9853-37f3294fff0b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.632764] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1145.632764] env[69992]: value = "task-2897380" [ 1145.632764] env[69992]: _type = "Task" [ 1145.632764] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.634127] env[69992]: DEBUG nova.policy [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8589a47b616643f5a513f62354529eda', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57eaf44c4ac5491380b329e1e86e9454', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1145.646855] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897380, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.647764] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5d5a12fe-de13-4a07-bc45-490da0c51db2 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "673be00f-e3c5-4a54-beeb-cf89828e9e32" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 56.437s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.835372] env[69992]: DEBUG oslo_vmware.api [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Task: {'id': task-2897379, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.146775} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.835552] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1145.835732] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1145.835908] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1145.836100] env[69992]: INFO nova.compute.manager [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Took 2.09 seconds to destroy the instance on the hypervisor. 
[ 1145.836352] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1145.836539] env[69992]: DEBUG nova.compute.manager [-] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1145.836634] env[69992]: DEBUG nova.network.neutron [-] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1145.866343] env[69992]: DEBUG nova.network.neutron [-] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1145.914164] env[69992]: DEBUG nova.network.neutron [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Successfully created port: 5ac51a75-6c4c-4475-8100-da5a8a5831ff {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1145.963857] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 62936d27-5405-4d29-b3ff-c4d8a74ba440] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1146.046801] env[69992]: DEBUG nova.compute.manager [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1146.047190] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1146.047361] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-67a6307a-ea63-4ebc-9a2d-1c26b89de527 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.054559] env[69992]: DEBUG oslo_vmware.api [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1146.054559] env[69992]: value = "task-2897381" [ 1146.054559] env[69992]: _type = "Task" [ 1146.054559] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.064296] env[69992]: DEBUG oslo_vmware.api [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897381, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.081082] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "bcb5131c-b2c6-4971-8a2e-4fcd7133442d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.081372] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "bcb5131c-b2c6-4971-8a2e-4fcd7133442d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.081594] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "bcb5131c-b2c6-4971-8a2e-4fcd7133442d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.081774] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "bcb5131c-b2c6-4971-8a2e-4fcd7133442d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.082027] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "bcb5131c-b2c6-4971-8a2e-4fcd7133442d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.085030] env[69992]: INFO nova.compute.manager [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Terminating instance [ 1146.097661] env[69992]: DEBUG nova.compute.manager [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1146.148428] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897380, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.038245} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.148689] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1146.148845] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f4723384-8c26-48b3-817e-be7849f27178/f4723384-8c26-48b3-817e-be7849f27178.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1146.149125] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f4723384-8c26-48b3-817e-be7849f27178/f4723384-8c26-48b3-817e-be7849f27178.vmdk to [datastore2] dd31269e-716c-44cd-9fc3-ce227fe5b3b2/dd31269e-716c-44cd-9fc3-ce227fe5b3b2.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1146.152635] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eb43a29a-e761-43ba-8b70-f17197e0d6fb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.159948] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1146.159948] env[69992]: value = "task-2897382" [ 1146.159948] env[69992]: _type = "Task" [ 1146.159948] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.168736] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897382, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.370790] env[69992]: DEBUG nova.network.neutron [-] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.466887] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 40b1fbe3-bde8-4fe1-91ea-00a19e2b86a4] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1146.502728] env[69992]: DEBUG nova.network.neutron [None req-c5abb01a-e8e9-493d-b6be-0cdc8b1d67c2 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Updating instance_info_cache with network_info: [{"id": "b39fa912-b02a-4764-8cc8-f79e08d575c6", "address": "fa:16:3e:02:93:e2", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb39fa912-b0", "ovs_interfaceid": "b39fa912-b02a-4764-8cc8-f79e08d575c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.565657] env[69992]: DEBUG oslo_vmware.api [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897381, 'name': PowerOffVM_Task, 'duration_secs': 0.202829} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.565922] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1146.566141] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Volume detach. 
Driver type: vmdk {{(pid=69992) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1146.566342] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582015', 'volume_id': 'f9a2424a-1e64-4395-b297-954664b816f3', 'name': 'volume-f9a2424a-1e64-4395-b297-954664b816f3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f2ac32d7-d32b-497a-a262-ab1cd95f87d0', 'attached_at': '', 'detached_at': '', 'volume_id': 'f9a2424a-1e64-4395-b297-954664b816f3', 'serial': 'f9a2424a-1e64-4395-b297-954664b816f3'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1146.567112] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf9cc4b-e56b-4c24-9b5b-e04d1902febe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.587837] env[69992]: DEBUG nova.compute.manager [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1146.588113] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1146.588890] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52c6be0-ab8a-4f41-a6ac-7d2c51151060 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.591861] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ffa265-0009-4dcc-b2d3-874760e324f1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.600812] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1146.601112] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-866be1bb-bc38-4145-a20c-1d10469f1ab2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.602752] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d70a54-cadb-497d-a67e-0d6796ad9207 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.631414] env[69992]: DEBUG oslo_vmware.api [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 
tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1146.631414] env[69992]: value = "task-2897383" [ 1146.631414] env[69992]: _type = "Task" [ 1146.631414] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.632140] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a363c4-2c12-4fda-840a-c751bae400cf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.640869] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d31948fb-8224-4519-83c6-1d7db9644c9c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.659230] env[69992]: DEBUG oslo_vmware.api [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897383, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.659599] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] The volume has not been displaced from its original location: [datastore2] volume-f9a2424a-1e64-4395-b297-954664b816f3/volume-f9a2424a-1e64-4395-b297-954664b816f3.vmdk. No consolidation needed. {{(pid=69992) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1146.665409] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Reconfiguring VM instance instance-00000040 to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1146.666598] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f80f5e0a-a0b2-4293-a69e-20f3e67ed8b9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.687614] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0fbcbd-3812-4ae4-ac5e-162ce9911fc7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.692780] env[69992]: DEBUG oslo_vmware.api [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1146.692780] env[69992]: value = "task-2897384" [ 1146.692780] env[69992]: _type = "Task" [ 1146.692780] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.726475] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897382, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.727792] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de191052-9591-4c8e-876f-9f3904c30fbe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.734815] env[69992]: DEBUG oslo_vmware.api [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897384, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.742579] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e33597c-5f6f-435c-9d9e-a3da73520c7c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.758607] env[69992]: DEBUG nova.compute.provider_tree [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1146.873332] env[69992]: INFO nova.compute.manager [-] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Took 1.04 seconds to deallocate network for instance. [ 1146.970456] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: f2e172f5-eafe-4f7c-ba9c-0fcf89ccca75] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1147.005082] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5abb01a-e8e9-493d-b6be-0cdc8b1d67c2 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Releasing lock "refresh_cache-a7f01cd7-f148-48fc-a71a-5461672d6039" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1147.005442] env[69992]: DEBUG nova.objects.instance [None req-c5abb01a-e8e9-493d-b6be-0cdc8b1d67c2 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lazy-loading 'migration_context' on Instance uuid a7f01cd7-f148-48fc-a71a-5461672d6039 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1147.110744] env[69992]: DEBUG nova.compute.manager [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1147.145434] env[69992]: DEBUG nova.virt.hardware [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1147.145826] env[69992]: DEBUG nova.virt.hardware [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1147.146488] env[69992]: DEBUG nova.virt.hardware [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1147.146860] env[69992]: DEBUG nova.virt.hardware [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1147.147172] env[69992]: DEBUG nova.virt.hardware [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1147.147441] env[69992]: DEBUG nova.virt.hardware [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1147.147832] env[69992]: DEBUG nova.virt.hardware [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1147.148169] env[69992]: DEBUG nova.virt.hardware [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1147.148509] env[69992]: DEBUG nova.virt.hardware [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 
tempest-ServersTestJSON-1985445483-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1147.148803] env[69992]: DEBUG nova.virt.hardware [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1147.149155] env[69992]: DEBUG nova.virt.hardware [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1147.151253] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55039f5f-35b7-4378-abcd-1bb9b707888c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.160491] env[69992]: DEBUG oslo_vmware.api [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897383, 'name': PowerOffVM_Task, 'duration_secs': 0.25841} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.161557] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1147.161898] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1147.162307] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48ef79d2-f064-4341-911c-904b021c4897 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.173612] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a79d0cde-50ba-41c1-9a5e-e8e1dab532fa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.182427] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897382, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.202357] env[69992]: DEBUG oslo_vmware.api [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897384, 'name': ReconfigVM_Task, 'duration_secs': 0.318184} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.202728] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Reconfigured VM instance instance-00000040 to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1147.207536] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26a5aa25-4671-4275-9bb8-303f1e353720 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.227042] env[69992]: DEBUG oslo_vmware.api [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1147.227042] env[69992]: value = "task-2897386" [ 1147.227042] env[69992]: _type = "Task" [ 1147.227042] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.235570] env[69992]: DEBUG oslo_vmware.api [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897386, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.245378] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1147.245586] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1147.245764] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Deleting the datastore file [datastore1] bcb5131c-b2c6-4971-8a2e-4fcd7133442d {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1147.246064] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c772615f-91af-426b-bc98-da5482b8d479 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.252744] env[69992]: DEBUG oslo_vmware.api [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1147.252744] env[69992]: value = "task-2897387" [ 1147.252744] env[69992]: _type = "Task" [ 1147.252744] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.264035] env[69992]: DEBUG oslo_vmware.api [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897387, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.295574] env[69992]: DEBUG nova.scheduler.client.report [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 106 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1147.295574] env[69992]: DEBUG nova.compute.provider_tree [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 106 to 107 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1147.295574] env[69992]: DEBUG nova.compute.provider_tree [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1147.347543] env[69992]: DEBUG nova.compute.manager [req-dfd816b4-db6d-4081-8587-bb8ba701e4e7 req-e6ae7b01-9a1f-4957-a561-df716d6df37a service nova] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Received event network-vif-plugged-5ac51a75-6c4c-4475-8100-da5a8a5831ff {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1147.347964] env[69992]: DEBUG oslo_concurrency.lockutils [req-dfd816b4-db6d-4081-8587-bb8ba701e4e7 req-e6ae7b01-9a1f-4957-a561-df716d6df37a service nova] Acquiring lock "bce01d14-3c1b-4dce-b61c-721e25a56497-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.348304] env[69992]: DEBUG oslo_concurrency.lockutils [req-dfd816b4-db6d-4081-8587-bb8ba701e4e7 req-e6ae7b01-9a1f-4957-a561-df716d6df37a service nova] Lock "bce01d14-3c1b-4dce-b61c-721e25a56497-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.348596] env[69992]: DEBUG oslo_concurrency.lockutils [req-dfd816b4-db6d-4081-8587-bb8ba701e4e7 req-e6ae7b01-9a1f-4957-a561-df716d6df37a service nova] Lock "bce01d14-3c1b-4dce-b61c-721e25a56497-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.348882] env[69992]: DEBUG nova.compute.manager [req-dfd816b4-db6d-4081-8587-bb8ba701e4e7 req-e6ae7b01-9a1f-4957-a561-df716d6df37a service nova] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] No waiting events found dispatching network-vif-plugged-5ac51a75-6c4c-4475-8100-da5a8a5831ff {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1147.349177] env[69992]: WARNING nova.compute.manager [req-dfd816b4-db6d-4081-8587-bb8ba701e4e7 req-e6ae7b01-9a1f-4957-a561-df716d6df37a service nova] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Received unexpected event network-vif-plugged-5ac51a75-6c4c-4475-8100-da5a8a5831ff for instance with vm_state building and task_state spawning. [ 1147.382527] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.419551] env[69992]: DEBUG nova.network.neutron [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Successfully updated port: 5ac51a75-6c4c-4475-8100-da5a8a5831ff {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1147.473448] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: d361769c-bfc2-4c72-83f4-dc9b51f907a3] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1147.509078] env[69992]: DEBUG nova.objects.base [None req-c5abb01a-e8e9-493d-b6be-0cdc8b1d67c2 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1147.510450] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff2832e-7840-4916-9a34-2f72a0ba751a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.533198] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83afac47-48c4-4ba8-929e-18d884f8b119 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.541920] env[69992]: DEBUG oslo_vmware.api [None req-c5abb01a-e8e9-493d-b6be-0cdc8b1d67c2 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1147.541920] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]527dad78-0b11-aadd-70e6-8217c68f625a" [ 1147.541920] env[69992]: _type = 
"Task" [ 1147.541920] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.553427] env[69992]: DEBUG oslo_vmware.api [None req-c5abb01a-e8e9-493d-b6be-0cdc8b1d67c2 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]527dad78-0b11-aadd-70e6-8217c68f625a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.678111] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897382, 'name': CopyVirtualDisk_Task} progress is 29%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.740065] env[69992]: DEBUG oslo_vmware.api [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897386, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.764106] env[69992]: DEBUG oslo_vmware.api [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897387, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.365822} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.764106] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1147.764296] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1147.764472] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1147.764713] env[69992]: INFO nova.compute.manager [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1147.764990] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1147.765641] env[69992]: DEBUG nova.compute.manager [-] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1147.765641] env[69992]: DEBUG nova.network.neutron [-] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1147.800940] env[69992]: DEBUG oslo_concurrency.lockutils [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.707s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.801651] env[69992]: DEBUG nova.compute.manager [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1147.804671] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 40.897s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.804944] env[69992]: DEBUG nova.objects.instance [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69992) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1147.922804] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "refresh_cache-bce01d14-3c1b-4dce-b61c-721e25a56497" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1147.925534] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired lock "refresh_cache-bce01d14-3c1b-4dce-b61c-721e25a56497" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1147.925534] env[69992]: DEBUG nova.network.neutron [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1147.977882] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 97cb6372-3f4e-427d-9509-7e6c43aa2e7b] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11847}} [ 1148.053762] env[69992]: DEBUG oslo_vmware.api [None req-c5abb01a-e8e9-493d-b6be-0cdc8b1d67c2 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]527dad78-0b11-aadd-70e6-8217c68f625a, 'name': SearchDatastore_Task, 'duration_secs': 0.048853} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.054083] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5abb01a-e8e9-493d-b6be-0cdc8b1d67c2 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.178986] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897382, 'name': CopyVirtualDisk_Task} progress is 52%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.240465] env[69992]: DEBUG oslo_vmware.api [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897386, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.312060] env[69992]: DEBUG nova.compute.utils [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1148.313525] env[69992]: DEBUG nova.compute.manager [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Not allocating networking since 'none' was specified. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1148.461898] env[69992]: DEBUG nova.network.neutron [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1148.484764] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: e0b5ad16-f631-444c-a189-167e34574316] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1148.521583] env[69992]: DEBUG nova.network.neutron [-] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.617074] env[69992]: DEBUG nova.network.neutron [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Updating instance_info_cache with network_info: [{"id": "5ac51a75-6c4c-4475-8100-da5a8a5831ff", "address": "fa:16:3e:8e:da:7c", "network": {"id": "8276eb4a-aa4d-463a-beae-6aab3fe1a5ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-494735806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57eaf44c4ac5491380b329e1e86e9454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ac51a75-6c", "ovs_interfaceid": "5ac51a75-6c4c-4475-8100-da5a8a5831ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.681979] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897382, 'name': CopyVirtualDisk_Task} progress is 71%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.741762] env[69992]: DEBUG oslo_vmware.api [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897386, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.815555] env[69992]: DEBUG nova.compute.manager [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1148.818989] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8d903a4-d963-4776-8ae9-56cce8b2cf9b tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.820325] env[69992]: DEBUG oslo_concurrency.lockutils [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.849s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.821808] env[69992]: INFO nova.compute.claims [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1148.990676] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: c205f559-7fe6-4d7e-beba-2fc96b89d705] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1149.024195] env[69992]: INFO nova.compute.manager [-] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Took 1.26 seconds to deallocate network for instance. [ 1149.120342] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Releasing lock "refresh_cache-bce01d14-3c1b-4dce-b61c-721e25a56497" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1149.120754] env[69992]: DEBUG nova.compute.manager [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Instance network_info: |[{"id": "5ac51a75-6c4c-4475-8100-da5a8a5831ff", "address": "fa:16:3e:8e:da:7c", "network": {"id": "8276eb4a-aa4d-463a-beae-6aab3fe1a5ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-494735806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57eaf44c4ac5491380b329e1e86e9454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ac51a75-6c", "ovs_interfaceid": "5ac51a75-6c4c-4475-8100-da5a8a5831ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1149.121372] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:da:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73b1ea51-8078-4169-921e-d5a224120ab4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5ac51a75-6c4c-4475-8100-da5a8a5831ff', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1149.130347] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1149.130621] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1149.130856] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da983873-a81e-46aa-a01d-13da78df2d42 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.154830] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1149.154830] env[69992]: value = "task-2897388" [ 1149.154830] env[69992]: _type = "Task" [ 1149.154830] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.165430] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897388, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.179375] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897382, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.242274] env[69992]: DEBUG oslo_vmware.api [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897386, 'name': ReconfigVM_Task, 'duration_secs': 1.905962} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.242772] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582015', 'volume_id': 'f9a2424a-1e64-4395-b297-954664b816f3', 'name': 'volume-f9a2424a-1e64-4395-b297-954664b816f3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f2ac32d7-d32b-497a-a262-ab1cd95f87d0', 'attached_at': '', 'detached_at': '', 'volume_id': 'f9a2424a-1e64-4395-b297-954664b816f3', 'serial': 'f9a2424a-1e64-4395-b297-954664b816f3'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1149.242954] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1149.243812] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8313f40-16a6-4005-a6e3-0a768496f867 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.256132] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1149.256319] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cb4a2200-7ae5-49c1-b3af-cd8c172c2340 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.334097] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1149.334097] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1149.334097] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Deleting the datastore file [datastore1] f2ac32d7-d32b-497a-a262-ab1cd95f87d0 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1149.334097] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9590f3cd-e5ac-49c0-b323-8592e626843c {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.341667] env[69992]: DEBUG oslo_vmware.api [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1149.341667] env[69992]: value = "task-2897390" [ 1149.341667] env[69992]: _type = "Task" [ 1149.341667] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.351157] env[69992]: DEBUG oslo_vmware.api [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897390, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.385083] env[69992]: DEBUG nova.compute.manager [req-6b2c5ccf-7946-4721-97f0-0cdc8f59d8f0 req-ce16775f-a85f-452f-b104-864c06196ad7 service nova] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Received event network-changed-5ac51a75-6c4c-4475-8100-da5a8a5831ff {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1149.385369] env[69992]: DEBUG nova.compute.manager [req-6b2c5ccf-7946-4721-97f0-0cdc8f59d8f0 req-ce16775f-a85f-452f-b104-864c06196ad7 service nova] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Refreshing instance network info cache due to event network-changed-5ac51a75-6c4c-4475-8100-da5a8a5831ff. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1149.385665] env[69992]: DEBUG oslo_concurrency.lockutils [req-6b2c5ccf-7946-4721-97f0-0cdc8f59d8f0 req-ce16775f-a85f-452f-b104-864c06196ad7 service nova] Acquiring lock "refresh_cache-bce01d14-3c1b-4dce-b61c-721e25a56497" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.385887] env[69992]: DEBUG oslo_concurrency.lockutils [req-6b2c5ccf-7946-4721-97f0-0cdc8f59d8f0 req-ce16775f-a85f-452f-b104-864c06196ad7 service nova] Acquired lock "refresh_cache-bce01d14-3c1b-4dce-b61c-721e25a56497" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.386142] env[69992]: DEBUG nova.network.neutron [req-6b2c5ccf-7946-4721-97f0-0cdc8f59d8f0 req-ce16775f-a85f-452f-b104-864c06196ad7 service nova] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Refreshing network info cache for port 5ac51a75-6c4c-4475-8100-da5a8a5831ff {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1149.496786] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 94a4a16e-926c-47ce-a5a7-0b216b7c5442] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1149.533551] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.666021] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897388, 'name': CreateVM_Task, 'duration_secs': 0.455552} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.666021] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1149.666154] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.666265] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.666587] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1149.666838] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d5c4ca3-44a7-4f1e-af36-2c7390750890 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.674217] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1149.674217] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52857411-ff00-151d-d1a9-ffea2fd5736b" [ 1149.674217] env[69992]: _type = "Task" [ 1149.674217] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.680101] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897382, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.194523} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.680638] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f4723384-8c26-48b3-817e-be7849f27178/f4723384-8c26-48b3-817e-be7849f27178.vmdk to [datastore2] dd31269e-716c-44cd-9fc3-ce227fe5b3b2/dd31269e-716c-44cd-9fc3-ce227fe5b3b2.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1149.681443] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d18f09-dc4c-4cf5-90f6-5ea54005c26a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.686472] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52857411-ff00-151d-d1a9-ffea2fd5736b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.706828] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] dd31269e-716c-44cd-9fc3-ce227fe5b3b2/dd31269e-716c-44cd-9fc3-ce227fe5b3b2.vmdk or device None with type streamOptimized {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1149.707156] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-305439da-dc5d-4c64-b52a-5fc7abc90ceb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.725963] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1149.725963] env[69992]: value = "task-2897391" [ 1149.725963] env[69992]: _type = "Task" [ 1149.725963] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.733983] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897391, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.827597] env[69992]: DEBUG nova.compute.manager [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1149.852315] env[69992]: DEBUG oslo_vmware.api [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897390, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.502673} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.852639] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1149.852978] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1149.853029] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1149.853208] env[69992]: INFO nova.compute.manager [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Took 3.81 seconds to destroy the instance on the hypervisor. [ 1149.853578] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1149.853819] env[69992]: DEBUG nova.compute.manager [-] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1149.853928] env[69992]: DEBUG nova.network.neutron [-] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1149.860325] env[69992]: DEBUG nova.virt.hardware [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1149.860567] env[69992]: DEBUG nova.virt.hardware [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1149.860728] env[69992]: DEBUG nova.virt.hardware [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1149.860914] env[69992]: DEBUG nova.virt.hardware [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1149.861160] env[69992]: DEBUG nova.virt.hardware [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1149.861218] env[69992]: DEBUG nova.virt.hardware [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1149.861422] env[69992]: DEBUG nova.virt.hardware [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1149.861584] env[69992]: DEBUG nova.virt.hardware [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1149.861749] env[69992]: DEBUG nova.virt.hardware [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1149.861913] env[69992]: DEBUG nova.virt.hardware [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1149.862099] env[69992]: DEBUG nova.virt.hardware [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1149.863322] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aee6fe2-4d16-4567-b3cd-063173ae218c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.873070] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17bfd534-c423-4503-8e36-8bf9c29b180a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.886632] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Instance VIF info [] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1149.892869] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Creating folder: Project (1e7680c4a71a45cdbf4e037d87b42300). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1149.897350] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3ada36db-d975-48da-8be4-6cea4be3d642 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.911057] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Created folder: Project (1e7680c4a71a45cdbf4e037d87b42300) in parent group-v581821. [ 1149.911273] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Creating folder: Instances. Parent ref: group-v582018. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1149.911507] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-43cdaeb3-6102-4859-b649-5a2f0faf7d9a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.922675] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Created folder: Instances in parent group-v582018. [ 1149.923169] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1149.923169] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1149.924053] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02f0a510-740e-49d0-bcff-b4385059b0d5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.946581] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1149.946581] env[69992]: value = "task-2897394" [ 1149.946581] env[69992]: _type = "Task" [ 1149.946581] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.954256] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897394, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.999995] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 7fbab19d-5a0a-4da3-b078-40ca0eaf8c97] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1150.190471] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52857411-ff00-151d-d1a9-ffea2fd5736b, 'name': SearchDatastore_Task, 'duration_secs': 0.052378} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.191422] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.191815] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1150.192187] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.192557] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1150.192820] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1150.195693] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9aeaf051-1dcd-44a4-9af0-d4bc6d5506ba {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.209055] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1150.209055] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1150.209055] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b283985c-5689-42ad-b077-a01d266d3af7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.215251] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1150.215251] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52ef2b7c-2bb6-a31e-2f30-9976774928e1" [ 1150.215251] env[69992]: _type = "Task" [ 1150.215251] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.227790] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ef2b7c-2bb6-a31e-2f30-9976774928e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.236915] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897391, 'name': ReconfigVM_Task, 'duration_secs': 0.372745} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.237808] env[69992]: DEBUG nova.network.neutron [req-6b2c5ccf-7946-4721-97f0-0cdc8f59d8f0 req-ce16775f-a85f-452f-b104-864c06196ad7 service nova] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Updated VIF entry in instance network info cache for port 5ac51a75-6c4c-4475-8100-da5a8a5831ff. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1150.238243] env[69992]: DEBUG nova.network.neutron [req-6b2c5ccf-7946-4721-97f0-0cdc8f59d8f0 req-ce16775f-a85f-452f-b104-864c06196ad7 service nova] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Updating instance_info_cache with network_info: [{"id": "5ac51a75-6c4c-4475-8100-da5a8a5831ff", "address": "fa:16:3e:8e:da:7c", "network": {"id": "8276eb4a-aa4d-463a-beae-6aab3fe1a5ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-494735806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57eaf44c4ac5491380b329e1e86e9454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ac51a75-6c", "ovs_interfaceid": "5ac51a75-6c4c-4475-8100-da5a8a5831ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.241868] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Reconfigured VM instance instance-0000001f to attach disk [datastore2] dd31269e-716c-44cd-9fc3-ce227fe5b3b2/dd31269e-716c-44cd-9fc3-ce227fe5b3b2.vmdk or device None with type streamOptimized {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1150.243394] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'size': 0, 'boot_index': 0, 'encryption_options': None, 'disk_bus': None, 'device_type': 'disk', 'guest_format': None, 'encryption_secret_uuid': None, 'encryption_format': None, 'encrypted': False, 'device_name': '/dev/sda', 'image_id': 'eb50549f-9db8-4c15-a738-0e4b1e9e33fb'}], 'ephemerals': [], 'block_device_mapping': [{'boot_index': None, 'disk_bus': None, 'device_type': None, 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581990', 'volume_id': '964072e4-b1a4-47ae-8221-dfb900c2f8b1', 'name': 'volume-964072e4-b1a4-47ae-8221-dfb900c2f8b1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'dd31269e-716c-44cd-9fc3-ce227fe5b3b2', 'attached_at': '', 'detached_at': '', 'volume_id': '964072e4-b1a4-47ae-8221-dfb900c2f8b1', 'serial': '964072e4-b1a4-47ae-8221-dfb900c2f8b1'}, 'attachment_id': 'cfc44e65-d20b-483c-b6a0-643728b589ce', 'delete_on_termination': False, 'mount_device': '/dev/sdb', 
'volume_type': None}], 'swap': None} {{(pid=69992) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1150.243951] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Volume attach. Driver type: vmdk {{(pid=69992) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1150.244232] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581990', 'volume_id': '964072e4-b1a4-47ae-8221-dfb900c2f8b1', 'name': 'volume-964072e4-b1a4-47ae-8221-dfb900c2f8b1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'dd31269e-716c-44cd-9fc3-ce227fe5b3b2', 'attached_at': '', 'detached_at': '', 'volume_id': '964072e4-b1a4-47ae-8221-dfb900c2f8b1', 'serial': '964072e4-b1a4-47ae-8221-dfb900c2f8b1'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1150.245971] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f32aa5-cca8-46e8-ba6c-bd6d69617230 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.267598] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45cf66cf-4f73-4831-b170-1b3947059673 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.297414] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] volume-964072e4-b1a4-47ae-8221-dfb900c2f8b1/volume-964072e4-b1a4-47ae-8221-dfb900c2f8b1.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1150.300294] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-489af56f-cc19-49fa-822f-7eb9a2bb8ce4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.319249] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1150.319249] env[69992]: value = "task-2897395" [ 1150.319249] env[69992]: _type = "Task" [ 1150.319249] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.327173] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897395, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.456363] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897394, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.486495] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e22738fe-3a93-4001-9340-5c7262fda95f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.493954] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a0fef3-c052-458e-93f5-9785c53984a6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.523973] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 98cd0eb8-d17a-4a9b-a172-1ba1207168d0] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1150.527932] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc4c6a8-80ae-4d3f-8263-3f0540b66712 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.536202] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263e05d3-1846-44f6-ada5-e20a8bbf142a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.551155] env[69992]: DEBUG nova.compute.provider_tree [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1150.681616] env[69992]: DEBUG nova.network.neutron [-] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.726099] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ef2b7c-2bb6-a31e-2f30-9976774928e1, 'name': SearchDatastore_Task, 'duration_secs': 0.009828} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.726891] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76c29079-5f8f-4c78-8a3c-a041523efeb4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.732324] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1150.732324] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]529ae4ab-8ec6-2bc3-ade7-a18b40968d65" [ 1150.732324] env[69992]: _type = "Task" [ 1150.732324] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.739339] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]529ae4ab-8ec6-2bc3-ade7-a18b40968d65, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.746475] env[69992]: DEBUG oslo_concurrency.lockutils [req-6b2c5ccf-7946-4721-97f0-0cdc8f59d8f0 req-ce16775f-a85f-452f-b104-864c06196ad7 service nova] Releasing lock "refresh_cache-bce01d14-3c1b-4dce-b61c-721e25a56497" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.746761] env[69992]: DEBUG nova.compute.manager [req-6b2c5ccf-7946-4721-97f0-0cdc8f59d8f0 req-ce16775f-a85f-452f-b104-864c06196ad7 service nova] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Received event network-vif-deleted-2dde3583-4d1f-43c5-8824-63e1fab3ee3d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1150.829437] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897395, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.956588] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897394, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.028394] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: ab3df643-58db-45b7-a572-9c040135989d] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1151.057926] env[69992]: DEBUG nova.scheduler.client.report [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1151.184712] env[69992]: INFO nova.compute.manager [-] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Took 1.33 seconds to deallocate network for instance. [ 1151.243549] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]529ae4ab-8ec6-2bc3-ade7-a18b40968d65, 'name': SearchDatastore_Task, 'duration_secs': 0.009463} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.243549] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.243821] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] bce01d14-3c1b-4dce-b61c-721e25a56497/bce01d14-3c1b-4dce-b61c-721e25a56497.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1151.243974] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d730de3c-e805-462a-8676-9cea7519b8a5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.252468] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1151.252468] env[69992]: value = "task-2897396" [ 1151.252468] env[69992]: _type = "Task" [ 1151.252468] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.262106] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897396, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.329393] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897395, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.461254] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897394, 'name': CreateVM_Task, 'duration_secs': 1.353678} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.461442] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1151.461879] env[69992]: DEBUG oslo_concurrency.lockutils [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.462185] env[69992]: DEBUG oslo_concurrency.lockutils [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.462508] env[69992]: DEBUG oslo_concurrency.lockutils [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1151.462779] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1947249-1f81-4393-97df-35777d39162f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.470352] env[69992]: DEBUG nova.compute.manager [req-3a7b1810-156f-454a-a2f6-e6861655a263 req-99cd66ac-c199-439d-b85d-e9028a6f31f5 service nova] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Received event network-vif-deleted-c83362f2-db37-45fa-9d49-76899f0edc31 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1151.472493] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1151.472493] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52c539a3-fd77-ccc6-ed13-e5a09c8eaa46" [ 1151.472493] env[69992]: _type = "Task" [ 1151.472493] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.485382] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c539a3-fd77-ccc6-ed13-e5a09c8eaa46, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.531712] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 1f9d0558-63fb-4a6f-a2d2-dd7a334249a2] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1151.566594] env[69992]: DEBUG oslo_concurrency.lockutils [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.746s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.567020] env[69992]: DEBUG nova.compute.manager [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1151.569728] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.880s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.569964] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.573498] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.436s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.573498] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.575037] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.728s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.576700] env[69992]: INFO nova.compute.claims [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 
37751af7-267e-4693-aaa3-cd1bb9c3d950] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1151.611052] env[69992]: INFO nova.scheduler.client.report [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Deleted allocations for instance 086ac14d-74bb-4bb6-90b3-3e345b2894a9 [ 1151.618540] env[69992]: INFO nova.scheduler.client.report [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Deleted allocations for instance 131096fc-addf-4d9a-9cd7-4abe98aabd1f [ 1151.754125] env[69992]: INFO nova.compute.manager [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Took 0.57 seconds to detach 1 volumes for instance. [ 1151.769267] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897396, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.830558] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897395, 'name': ReconfigVM_Task, 'duration_secs': 1.399775} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.830838] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Reconfigured VM instance instance-0000001f to attach disk [datastore1] volume-964072e4-b1a4-47ae-8221-dfb900c2f8b1/volume-964072e4-b1a4-47ae-8221-dfb900c2f8b1.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1151.835541] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a76e857-ddee-4d9c-bb2d-25698db7599a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.850402] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1151.850402] env[69992]: value = "task-2897397" [ 1151.850402] env[69992]: _type = "Task" [ 1151.850402] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.858564] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897397, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.985896] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c539a3-fd77-ccc6-ed13-e5a09c8eaa46, 'name': SearchDatastore_Task, 'duration_secs': 0.050108} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.986280] env[69992]: DEBUG oslo_concurrency.lockutils [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.986531] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1151.986822] env[69992]: DEBUG oslo_concurrency.lockutils [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.986998] env[69992]: DEBUG oslo_concurrency.lockutils [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.987243] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1151.987680] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cee246a7-7c84-46ae-bfe2-5a016ababda5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.997331] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1151.997557] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1151.998313] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-798916d0-328d-41c7-b140-f6080ff6801a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.004933] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1152.004933] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]526f8a86-fe63-bdac-6a0a-88db04e7a789" [ 1152.004933] env[69992]: _type = "Task" [ 1152.004933] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.013466] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526f8a86-fe63-bdac-6a0a-88db04e7a789, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.037858] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: bf75484e-4020-48f7-9419-bd88d0462b90] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1152.083228] env[69992]: DEBUG nova.compute.utils [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1152.084707] env[69992]: DEBUG nova.compute.manager [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Not allocating networking since 'none' was specified. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1152.122495] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0c23b43b-350d-4ef7-b76b-534cdd7fd0e7 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "086ac14d-74bb-4bb6-90b3-3e345b2894a9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.868s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1152.131096] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1b646bed-6307-4cac-9d97-a5876e993034 tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "131096fc-addf-4d9a-9cd7-4abe98aabd1f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.956s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1152.267823] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.268558] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897396, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.518862} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.268558] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] bce01d14-3c1b-4dce-b61c-721e25a56497/bce01d14-3c1b-4dce-b61c-721e25a56497.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1152.268896] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1152.269077] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3d533151-63df-4c3e-bcfa-a706ccbc3c6c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.277375] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1152.277375] env[69992]: value = "task-2897398" [ 1152.277375] env[69992]: _type = "Task" [ 1152.277375] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.286516] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897398, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.360110] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897397, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.515877] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526f8a86-fe63-bdac-6a0a-88db04e7a789, 'name': SearchDatastore_Task, 'duration_secs': 0.009987} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.516699] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c53d571c-9b81-431c-a60f-74b6f994953b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.522182] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1152.522182] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528055df-6faf-6a92-b801-39425a69b29c" [ 1152.522182] env[69992]: _type = "Task" [ 1152.522182] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.531646] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528055df-6faf-6a92-b801-39425a69b29c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.539159] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: a29534bf-ee12-4b94-839b-4a12659ebd3b] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1152.588545] env[69992]: DEBUG nova.compute.manager [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1152.787341] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897398, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.206044} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.789853] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1152.790867] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1bdc88a-9371-422c-80ca-c9818bca4a4c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.813347] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] bce01d14-3c1b-4dce-b61c-721e25a56497/bce01d14-3c1b-4dce-b61c-721e25a56497.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1152.816350] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-252149d3-a3fa-4630-baf2-f801e603a6d2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.837043] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1152.837043] env[69992]: value = "task-2897399" [ 1152.837043] env[69992]: _type = "Task" [ 1152.837043] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.849282] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897399, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.862626] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897397, 'name': ReconfigVM_Task, 'duration_secs': 0.762513} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.862777] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581990', 'volume_id': '964072e4-b1a4-47ae-8221-dfb900c2f8b1', 'name': 'volume-964072e4-b1a4-47ae-8221-dfb900c2f8b1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'dd31269e-716c-44cd-9fc3-ce227fe5b3b2', 'attached_at': '', 'detached_at': '', 'volume_id': '964072e4-b1a4-47ae-8221-dfb900c2f8b1', 'serial': '964072e4-b1a4-47ae-8221-dfb900c2f8b1'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1152.863271] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd57620c-4f87-4d16-a3e9-960fbbee0962 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.869797] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1152.869797] env[69992]: value = "task-2897400" [ 1152.869797] env[69992]: _type = "Task" [ 1152.869797] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.878337] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897400, 'name': Rename_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.943687] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "a35dd590-b5ff-4878-8aa5-8797814d8779" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.943926] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "a35dd590-b5ff-4878-8aa5-8797814d8779" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1152.968585] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "033d667f-5511-4254-a7e2-f8a2a94178d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.969060] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "033d667f-5511-4254-a7e2-f8a2a94178d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.031972] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528055df-6faf-6a92-b801-39425a69b29c, 'name': SearchDatastore_Task, 'duration_secs': 0.010727} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.034418] env[69992]: DEBUG oslo_concurrency.lockutils [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1153.034684] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 4e93b655-aaf4-49b8-bbb2-92287ec15bbc/4e93b655-aaf4-49b8-bbb2-92287ec15bbc.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1153.035118] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-341afb69-c372-4eab-a9b1-eb15ebbad120 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.042436] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 00b2fd0b-7841-448d-82cf-436aa8d80cda] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1153.043989] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1153.043989] env[69992]: value = "task-2897401" [ 1153.043989] env[69992]: _type = "Task" [ 1153.043989] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.054570] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897401, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.073122] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d5f6ef5-daec-4448-9afd-70bbe5c54d80 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.082154] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91f9195f-7ca1-4464-9163-87d6a1b181ef {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.117673] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd062e66-878b-4eee-a461-00e6796978fe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.125425] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca32bbea-c656-4af3-8ffe-158e0ceef868 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.139305] env[69992]: DEBUG nova.compute.provider_tree [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1153.349669] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897399, 'name': ReconfigVM_Task, 'duration_secs': 0.317977} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.350071] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Reconfigured VM instance instance-00000042 to attach disk [datastore2] bce01d14-3c1b-4dce-b61c-721e25a56497/bce01d14-3c1b-4dce-b61c-721e25a56497.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1153.350794] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f41dce84-2962-490c-80db-919b92d29079 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.358199] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1153.358199] env[69992]: value = "task-2897402" [ 1153.358199] env[69992]: _type = "Task" [ 1153.358199] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.367247] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897402, 'name': Rename_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.380950] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897400, 'name': Rename_Task, 'duration_secs': 0.156242} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.381367] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1153.381661] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5aa4306b-081d-43fd-856c-5d0f6cfd306c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.392777] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1153.392777] env[69992]: value = "task-2897403" [ 1153.392777] env[69992]: _type = "Task" [ 1153.392777] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.403332] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897403, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.446935] env[69992]: DEBUG nova.compute.manager [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1153.474641] env[69992]: DEBUG nova.compute.manager [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1153.545790] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 73e41918-88b8-4ff7-9fdd-b45ac97c80ec] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1153.561322] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897401, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460382} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.561322] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 4e93b655-aaf4-49b8-bbb2-92287ec15bbc/4e93b655-aaf4-49b8-bbb2-92287ec15bbc.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1153.561322] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1153.561466] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4de3cfcc-d81a-4edb-886f-82e131682bc7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.568247] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1153.568247] env[69992]: value = "task-2897404" [ 1153.568247] env[69992]: _type = "Task" [ 1153.568247] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.576505] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897404, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.622516] env[69992]: DEBUG nova.compute.manager [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1153.644828] env[69992]: DEBUG nova.scheduler.client.report [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1153.655081] env[69992]: DEBUG nova.virt.hardware [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1153.655340] env[69992]: DEBUG nova.virt.hardware [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1153.655501] env[69992]: DEBUG nova.virt.hardware [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1153.655686] env[69992]: DEBUG nova.virt.hardware [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1153.655837] env[69992]: DEBUG nova.virt.hardware [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1153.655987] env[69992]: DEBUG nova.virt.hardware [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1153.656335] env[69992]: DEBUG nova.virt.hardware [None 
req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1153.656413] env[69992]: DEBUG nova.virt.hardware [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1153.656530] env[69992]: DEBUG nova.virt.hardware [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1153.656690] env[69992]: DEBUG nova.virt.hardware [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1153.656865] env[69992]: DEBUG nova.virt.hardware [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1153.657727] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d6c84b-605b-46d4-8d08-0b01eb14d12a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.665964] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6662dc37-2259-47af-9441-2bed0473da8c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.681112] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Instance VIF info [] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1153.687394] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1153.687684] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1153.687909] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d2161a61-fd2f-4bdd-941f-2a80d1dda825 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.704588] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1153.704588] env[69992]: value = "task-2897405" [ 1153.704588] env[69992]: _type = "Task" [ 1153.704588] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.715608] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897405, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.869430] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897402, 'name': Rename_Task, 'duration_secs': 0.231789} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.869527] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1153.869721] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60f306aa-6a59-4dd7-a469-f2cd7da6e28b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.877104] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1153.877104] env[69992]: value = "task-2897406" [ 1153.877104] env[69992]: _type = "Task" [ 1153.877104] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.885522] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897406, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.901034] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897403, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.972936] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.997916] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.052789] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 9bab6bf7-43c8-4cc3-b484-4472f1acdf45] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1154.078387] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897404, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067331} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.078643] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1154.079489] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09900f26-b070-4911-9b84-f2ae46e86384 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.101267] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] 4e93b655-aaf4-49b8-bbb2-92287ec15bbc/4e93b655-aaf4-49b8-bbb2-92287ec15bbc.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1154.101267] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6650542-6f3a-4be7-a248-8dda3a9f8f0c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.121567] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1154.121567] env[69992]: value = "task-2897407" [ 1154.121567] env[69992]: _type = "Task" [ 1154.121567] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.130068] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897407, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.150495] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.575s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.150716] env[69992]: DEBUG nova.compute.manager [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1154.155020] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 37.307s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.155020] env[69992]: DEBUG nova.objects.instance [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69992) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1154.218231] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897405, 'name': CreateVM_Task, 'duration_secs': 0.299027} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.218365] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1154.218806] env[69992]: DEBUG oslo_concurrency.lockutils [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.219035] env[69992]: DEBUG oslo_concurrency.lockutils [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1154.219400] env[69992]: DEBUG oslo_concurrency.lockutils [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1154.219780] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41dad915-abb9-42ad-94bc-1b70a5d862eb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.224227] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1154.224227] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5208d30d-7631-f10a-79fd-aeadf4820f24" [ 1154.224227] env[69992]: _type = "Task" [ 1154.224227] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.231622] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5208d30d-7631-f10a-79fd-aeadf4820f24, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.387417] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897406, 'name': PowerOnVM_Task} progress is 87%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.401221] env[69992]: DEBUG oslo_vmware.api [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897403, 'name': PowerOnVM_Task, 'duration_secs': 0.521295} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.401457] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1154.510079] env[69992]: DEBUG nova.compute.manager [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1154.511049] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e4d53b-1a72-4323-9fa1-3aac4f5d32fc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.555833] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 27580836-7ab5-4e64-a985-3e6fc22a8b77] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1154.631018] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897407, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.657974] env[69992]: DEBUG nova.compute.utils [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1154.659380] env[69992]: DEBUG nova.compute.manager [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1154.659597] env[69992]: DEBUG nova.network.neutron [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1154.735069] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5208d30d-7631-f10a-79fd-aeadf4820f24, 'name': SearchDatastore_Task, 'duration_secs': 0.009669} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.735727] env[69992]: DEBUG oslo_concurrency.lockutils [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1154.735971] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1154.736230] env[69992]: DEBUG oslo_concurrency.lockutils [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.736385] env[69992]: DEBUG oslo_concurrency.lockutils [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1154.736604] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1154.736849] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-151befb2-d21c-4931-b5a0-0f8af1a60beb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.745531] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1154.745729] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1154.746486] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-561bd62b-65d4-4cdb-9a17-70d6f1b85ae4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.751852] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1154.751852] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52305a60-7605-adc0-120e-a1aaf3069c56" [ 1154.751852] env[69992]: _type = "Task" [ 1154.751852] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.759582] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52305a60-7605-adc0-120e-a1aaf3069c56, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.887792] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897406, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.965703] env[69992]: DEBUG nova.policy [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a4447bbc3533486aab94f73f313a2295', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '85e3c4a655d445658f21b46f360dcfe8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1155.030964] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9392be9b-30e9-46a9-871e-0e6294200311 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 79.838s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.062096] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: a9274dfc-afbd-419b-a98b-053d71a05d7c] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1155.132123] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897407, 'name': ReconfigVM_Task, 'duration_secs': 0.928531} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.132456] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Reconfigured VM instance instance-00000043 to attach disk [datastore2] 4e93b655-aaf4-49b8-bbb2-92287ec15bbc/4e93b655-aaf4-49b8-bbb2-92287ec15bbc.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1155.133090] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6037a2e4-8afd-4cfe-8fa4-50a4d58530b1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.139189] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1155.139189] env[69992]: value = "task-2897408" [ 1155.139189] env[69992]: _type = "Task" [ 1155.139189] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.146414] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897408, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.165759] env[69992]: DEBUG nova.compute.manager [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1155.169324] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e089ad21-2c87-42b5-82ab-d2573ad16d39 tempest-ServersAdmin275Test-316600470 tempest-ServersAdmin275Test-316600470-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.170697] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.193s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.170926] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.173150] env[69992]: DEBUG oslo_concurrency.lockutils [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.475s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.174755] env[69992]: INFO nova.compute.claims [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1155.202234] env[69992]: INFO nova.scheduler.client.report [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Deleted allocations for instance 32bdb15d-6a4d-4445-9b82-d18b0f6743b6 [ 1155.262788] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52305a60-7605-adc0-120e-a1aaf3069c56, 'name': SearchDatastore_Task, 'duration_secs': 0.008305} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.263587] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3276a5e-9551-4afa-9828-c8bd6e40c5e9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.270748] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1155.270748] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52ecf0f8-7ae6-581a-4c37-9da7259e464e" [ 1155.270748] env[69992]: _type = "Task" [ 1155.270748] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.276321] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ecf0f8-7ae6-581a-4c37-9da7259e464e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.388835] env[69992]: DEBUG oslo_vmware.api [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897406, 'name': PowerOnVM_Task, 'duration_secs': 1.136525} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.389253] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1155.390110] env[69992]: INFO nova.compute.manager [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Took 8.28 seconds to spawn the instance on the hypervisor. 
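Annotation: the spawn sequence logged above (copy of the cached VMDK, ExtendVirtualDisk_Task, CreateVM_Task, SearchDatastore_Task, Rename_Task, PowerOnVM_Task) is driven through oslo.vmware's session object, which issues each SOAP call and then polls the returned task until it finishes, producing the "Waiting for the task", "progress is N%" and "completed successfully" entries. The sketch below only illustrates that invoke-then-poll pattern; the endpoint, credentials and function name are placeholders, not values from this log, and a reachable vCenter plus an existing VM reference are assumed.

```python
# Illustrative sketch of the invoke-then-poll pattern behind the task
# entries above. Host and credentials are placeholders (assumptions); a
# reachable vCenter is required for this to actually run.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.test',         # placeholder vCenter endpoint
    'svc-user',                # placeholder username
    'secret',                  # placeholder password
    api_retry_count=3,
    task_poll_interval=0.5)    # seconds between the "progress is N%" polls


def power_on(session, vm_ref):
    # vm_ref is assumed to be a VirtualMachine managed-object reference,
    # obtained elsewhere (e.g. via the PropertyCollector queries logged above).
    # invoke_api() issues the SOAP request and returns a task reference;
    # wait_for_task() polls it until SUCCESS, logging progress along the way,
    # or raises if the task ends in an error state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)
```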
[ 1155.390356] env[69992]: DEBUG nova.compute.manager [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1155.391241] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b849c2c-3882-4621-82d7-efbb1c49aa80 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.402159] env[69992]: DEBUG nova.network.neutron [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Successfully created port: b42eb1be-6903-43f9-8796-f03e6defbfce {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1155.562513] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: c1d73002-6e69-41a6-95b3-34dccaf872ef] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1155.650275] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897408, 'name': Rename_Task, 'duration_secs': 0.245152} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.650386] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1155.650624] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-49034a89-84a5-4910-a8f8-1b59fd49e18f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.660115] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1155.660115] env[69992]: value = "task-2897409" [ 1155.660115] env[69992]: _type = "Task" [ 1155.660115] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.669554] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897409, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.715424] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dad6057e-2d7a-437e-854e-3a514d37e6cc tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Lock "32bdb15d-6a4d-4445-9b82-d18b0f6743b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.783s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.779953] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ecf0f8-7ae6-581a-4c37-9da7259e464e, 'name': SearchDatastore_Task, 'duration_secs': 0.035244} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.781082] env[69992]: DEBUG oslo_concurrency.lockutils [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1155.781430] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] c1c90aa6-922d-4315-8ead-2263a55a5d6e/c1c90aa6-922d-4315-8ead-2263a55a5d6e.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1155.784049] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3cb3656d-71d4-4e0a-b28c-c53dc12e981c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.791688] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1155.791688] env[69992]: value = "task-2897410" [ 1155.791688] env[69992]: _type = "Task" [ 1155.791688] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.801304] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897410, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.913891] env[69992]: INFO nova.compute.manager [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Took 63.07 seconds to build instance. 
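Annotation: the 'Acquiring lock ... / acquired by ... :: waited / "released" by ... :: held' entries (for "compute_resources", per-instance UUIDs, and the [datastore2] image-cache paths) come from oslo.concurrency's lockutils wrappers. A short illustrative sketch of both forms follows; the function names are made up, while the lock names mirror the log.

```python
# Illustrative sketch of the oslo.concurrency locking that produces the
# lock acquire/release lines above. Functions are hypothetical examples.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def claim_instance_resources():
    # Runs with the named semaphore held; the decorator's wrapper logs how
    # long the caller waited for the lock and how long it was held.
    pass


def use_cached_image(datastore_path):
    # Context-manager form, as used for the per-image datastore cache locks
    # ('[datastore2] devstack-image-cache_base/<image-id>' above).
    with lockutils.lock(datastore_path):
        pass


claim_instance_resources()
use_cached_image('[datastore2] devstack-image-cache_base/example-image-id')
```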
[ 1156.066318] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: eba81db1-973c-4981-baca-cb98e4087510] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1156.172452] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897409, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.179907] env[69992]: DEBUG nova.compute.manager [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1156.214752] env[69992]: DEBUG nova.virt.hardware [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1156.215078] env[69992]: DEBUG nova.virt.hardware [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1156.215216] env[69992]: DEBUG nova.virt.hardware [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1156.215463] env[69992]: DEBUG nova.virt.hardware [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1156.215585] env[69992]: DEBUG nova.virt.hardware [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1156.215734] env[69992]: DEBUG nova.virt.hardware [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be 
tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1156.215945] env[69992]: DEBUG nova.virt.hardware [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1156.216144] env[69992]: DEBUG nova.virt.hardware [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1156.216458] env[69992]: DEBUG nova.virt.hardware [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1156.216660] env[69992]: DEBUG nova.virt.hardware [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1156.216839] env[69992]: DEBUG nova.virt.hardware [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1156.217825] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb9c742f-e3c1-4328-82b7-cf3d4945d490 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.229376] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3713c2ce-dd4a-4c1a-8af6-998b1975c93c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.287899] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquiring lock "b7a1b9e1-4d57-435f-bdb6-51481968aacb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1156.288407] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Lock "b7a1b9e1-4d57-435f-bdb6-51481968aacb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.288773] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquiring lock "b7a1b9e1-4d57-435f-bdb6-51481968aacb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1156.289118] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Lock "b7a1b9e1-4d57-435f-bdb6-51481968aacb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.289457] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Lock "b7a1b9e1-4d57-435f-bdb6-51481968aacb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1156.293696] env[69992]: INFO nova.compute.manager [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Terminating instance [ 1156.311132] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897410, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485487} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.312063] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] c1c90aa6-922d-4315-8ead-2263a55a5d6e/c1c90aa6-922d-4315-8ead-2263a55a5d6e.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1156.312293] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1156.312544] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-23c58f96-c0db-40f2-8068-223bf550fbc0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.323061] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1156.323061] env[69992]: value = "task-2897411" [ 1156.323061] env[69992]: _type = "Task" [ 1156.323061] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.335374] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897411, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.415649] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8aa843a8-c29b-41fe-ad4d-d325b7aa403b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "bce01d14-3c1b-4dce-b61c-721e25a56497" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.629s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1156.575689] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 6c58c05e-9679-4e53-89e7-c7c9cb11cff0] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1156.671699] env[69992]: DEBUG oslo_vmware.api [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897409, 'name': PowerOnVM_Task, 'duration_secs': 0.630806} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.674665] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1156.674876] env[69992]: INFO nova.compute.manager [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Took 6.85 seconds to spawn the instance on the hypervisor. [ 1156.675068] env[69992]: DEBUG nova.compute.manager [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1156.676282] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a273a8-c0f7-46ad-be0d-a588d979074b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.688743] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f806fd5-54f7-4c1f-ab7d-e9def22b240e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.696259] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e537aa-a2a5-4ed6-8bd3-64ddf2bdab61 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.730211] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-054441a8-f776-4105-b630-b4b8ae015322 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.738685] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e45469c-3a97-4f7a-b6fb-8cfe6fd6e961 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.753834] env[69992]: DEBUG nova.compute.provider_tree [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1156.806571] env[69992]: DEBUG nova.compute.manager [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1156.807014] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1156.807722] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71927f78-4f20-49e3-af80-c13d65ad8db7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.816608] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1156.817802] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-95f4923e-6e1a-4561-b393-8753be6756fe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.824384] env[69992]: DEBUG oslo_vmware.api [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for the task: (returnval){ [ 1156.824384] env[69992]: value = "task-2897412" [ 1156.824384] env[69992]: _type = "Task" [ 1156.824384] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.835996] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897411, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064971} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.839615] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1156.839930] env[69992]: DEBUG oslo_vmware.api [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897412, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.840698] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cde7c62-9d85-4fa9-8d56-d22cb036d7d9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.862191] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] c1c90aa6-922d-4315-8ead-2263a55a5d6e/c1c90aa6-922d-4315-8ead-2263a55a5d6e.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1156.862529] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6db42fc-a05b-4af1-8957-986d6addbd19 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.883774] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1156.883774] env[69992]: value = "task-2897413" [ 1156.883774] env[69992]: _type = "Task" [ 1156.883774] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.892890] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897413, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.081736] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 1d5722e1-5a48-4212-bbc7-527a3739db6e] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1157.107652] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquiring lock "4cd9fb91-44f1-4304-a2bf-c8b294b19e0e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1157.107996] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Lock "4cd9fb91-44f1-4304-a2bf-c8b294b19e0e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.112026] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquiring lock "4cd9fb91-44f1-4304-a2bf-c8b294b19e0e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1157.112026] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Lock "4cd9fb91-44f1-4304-a2bf-c8b294b19e0e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.112026] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Lock "4cd9fb91-44f1-4304-a2bf-c8b294b19e0e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1157.112026] env[69992]: INFO nova.compute.manager [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Terminating instance [ 1157.149099] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "bce01d14-3c1b-4dce-b61c-721e25a56497" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1157.149445] 
env[69992]: DEBUG oslo_concurrency.lockutils [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "bce01d14-3c1b-4dce-b61c-721e25a56497" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.149712] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "bce01d14-3c1b-4dce-b61c-721e25a56497-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1157.149979] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "bce01d14-3c1b-4dce-b61c-721e25a56497-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.150235] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "bce01d14-3c1b-4dce-b61c-721e25a56497-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1157.152750] env[69992]: INFO nova.compute.manager [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Terminating instance [ 1157.198379] env[69992]: INFO nova.compute.manager [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Took 50.55 seconds to build instance. 
[ 1157.207458] env[69992]: DEBUG nova.compute.manager [req-3991ea9a-0c81-4eaa-80d0-05dd40363104 req-8bc9182b-c79f-4dd0-9fe9-d9a83cb27f6d service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Received event network-vif-plugged-b42eb1be-6903-43f9-8796-f03e6defbfce {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1157.208416] env[69992]: DEBUG oslo_concurrency.lockutils [req-3991ea9a-0c81-4eaa-80d0-05dd40363104 req-8bc9182b-c79f-4dd0-9fe9-d9a83cb27f6d service nova] Acquiring lock "37751af7-267e-4693-aaa3-cd1bb9c3d950-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1157.208416] env[69992]: DEBUG oslo_concurrency.lockutils [req-3991ea9a-0c81-4eaa-80d0-05dd40363104 req-8bc9182b-c79f-4dd0-9fe9-d9a83cb27f6d service nova] Lock "37751af7-267e-4693-aaa3-cd1bb9c3d950-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.208416] env[69992]: DEBUG oslo_concurrency.lockutils [req-3991ea9a-0c81-4eaa-80d0-05dd40363104 req-8bc9182b-c79f-4dd0-9fe9-d9a83cb27f6d service nova] Lock "37751af7-267e-4693-aaa3-cd1bb9c3d950-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1157.208416] env[69992]: DEBUG nova.compute.manager [req-3991ea9a-0c81-4eaa-80d0-05dd40363104 req-8bc9182b-c79f-4dd0-9fe9-d9a83cb27f6d service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] No waiting events found dispatching network-vif-plugged-b42eb1be-6903-43f9-8796-f03e6defbfce {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1157.208416] env[69992]: WARNING nova.compute.manager [req-3991ea9a-0c81-4eaa-80d0-05dd40363104 req-8bc9182b-c79f-4dd0-9fe9-d9a83cb27f6d service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Received unexpected event network-vif-plugged-b42eb1be-6903-43f9-8796-f03e6defbfce for instance with vm_state building and task_state spawning. 
[ 1157.257950] env[69992]: DEBUG nova.scheduler.client.report [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1157.277025] env[69992]: DEBUG nova.network.neutron [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Successfully updated port: b42eb1be-6903-43f9-8796-f03e6defbfce {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1157.343079] env[69992]: DEBUG oslo_vmware.api [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897412, 'name': PowerOffVM_Task, 'duration_secs': 0.200888} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.343079] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1157.343079] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1157.343331] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f9de3d3a-0375-4cdd-8c4a-0cd176b9aa69 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.394782] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897413, 'name': ReconfigVM_Task, 'duration_secs': 0.347181} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.395122] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Reconfigured VM instance instance-00000044 to attach disk [datastore2] c1c90aa6-922d-4315-8ead-2263a55a5d6e/c1c90aa6-922d-4315-8ead-2263a55a5d6e.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1157.395784] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c305b0f2-e01a-47f3-8d46-099754af9600 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.404715] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1157.404715] env[69992]: value = "task-2897415" [ 1157.404715] env[69992]: _type = "Task" [ 1157.404715] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.417780] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897415, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.418601] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1157.418821] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1157.419008] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Deleting the datastore file [datastore1] b7a1b9e1-4d57-435f-bdb6-51481968aacb {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1157.419272] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-02956e64-ccd7-49eb-863b-86b538a5470e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.426757] env[69992]: DEBUG oslo_vmware.api [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for the task: (returnval){ [ 1157.426757] env[69992]: value = "task-2897416" [ 1157.426757] env[69992]: _type = "Task" [ 1157.426757] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.435231] env[69992]: DEBUG oslo_vmware.api [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897416, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.587372] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: e5d9de80-1ee5-462a-8459-168fd60e1972] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1157.617337] env[69992]: DEBUG nova.compute.manager [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1157.617337] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1157.618530] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d07a3a-b874-47f8-a433-87b3b6d723f4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.627317] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1157.627596] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b925a22c-4138-41cb-a826-329f2c300dbc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.634933] env[69992]: DEBUG oslo_vmware.api [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for the task: (returnval){ [ 1157.634933] env[69992]: value = "task-2897417" [ 1157.634933] env[69992]: _type = "Task" [ 1157.634933] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.643739] env[69992]: DEBUG oslo_vmware.api [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2897417, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.656990] env[69992]: DEBUG nova.compute.manager [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1157.657241] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1157.658262] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f32d81bd-d7d8-4e13-92fc-5233c65f7288 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.666690] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1157.666965] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b956a1b-e030-4162-accc-2336473b9ba6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.675644] env[69992]: DEBUG oslo_vmware.api [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1157.675644] env[69992]: value = "task-2897418" [ 1157.675644] env[69992]: _type = "Task" [ 1157.675644] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.684753] env[69992]: DEBUG oslo_vmware.api [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897418, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.699695] env[69992]: DEBUG oslo_concurrency.lockutils [None req-acc714a4-8668-461a-8599-ce096f06364b tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lock "4e93b655-aaf4-49b8-bbb2-92287ec15bbc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.061s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1157.764045] env[69992]: DEBUG oslo_concurrency.lockutils [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.591s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1157.764585] env[69992]: DEBUG nova.compute.manager [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1157.767708] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 37.925s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.784027] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Acquiring lock "refresh_cache-37751af7-267e-4693-aaa3-cd1bb9c3d950" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.784211] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Acquired lock "refresh_cache-37751af7-267e-4693-aaa3-cd1bb9c3d950" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1157.785221] env[69992]: DEBUG nova.network.neutron [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1157.916195] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897415, 'name': Rename_Task, 'duration_secs': 0.159098} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.916541] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1157.916860] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27b454dc-6da9-4b82-b257-36057b4d15d6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.924566] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1157.924566] env[69992]: value = "task-2897419" [ 1157.924566] env[69992]: _type = "Task" [ 1157.924566] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.939299] env[69992]: DEBUG oslo_vmware.api [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Task: {'id': task-2897416, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127788} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.939612] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897419, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.939880] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1157.940147] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1157.940355] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1157.940848] env[69992]: INFO nova.compute.manager [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1157.940848] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1157.940961] env[69992]: DEBUG nova.compute.manager [-] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1157.941123] env[69992]: DEBUG nova.network.neutron [-] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1158.090930] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: ee4c0f2b-44cb-4b37-8e4a-5706b9932144] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1158.147663] env[69992]: DEBUG oslo_vmware.api [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2897417, 'name': PowerOffVM_Task, 'duration_secs': 0.214874} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.147972] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1158.148129] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1158.148392] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4ef9f45-9eb6-4823-84dc-a7fbd9650313 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.185111] env[69992]: DEBUG oslo_vmware.api [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897418, 'name': PowerOffVM_Task, 'duration_secs': 0.274972} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.185382] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1158.185549] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1158.185803] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b1b37ed7-5097-4812-acce-df00e8047c22 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.226250] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1158.226556] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1158.226771] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Deleting the datastore file [datastore1] 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1158.227158] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0763c20f-ebdb-4981-9b3b-90c324fed89a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.235965] env[69992]: DEBUG oslo_vmware.api [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for the task: (returnval){ [ 1158.235965] env[69992]: value = "task-2897422" [ 1158.235965] env[69992]: _type = "Task" [ 1158.235965] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.244946] env[69992]: DEBUG oslo_vmware.api [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2897422, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.259881] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1158.260225] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1158.260518] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Deleting the datastore file [datastore2] bce01d14-3c1b-4dce-b61c-721e25a56497 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1158.260818] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d386f93c-3652-4fda-a3a3-edfb8fa70a9e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.268919] env[69992]: DEBUG oslo_vmware.api [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1158.268919] env[69992]: value = "task-2897423" [ 1158.268919] env[69992]: _type = "Task" [ 1158.268919] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.273838] env[69992]: DEBUG nova.compute.utils [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1158.279027] env[69992]: DEBUG nova.objects.instance [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lazy-loading 'migration_context' on Instance uuid 0e8163d9-6ff5-4f1e-af33-ccb42fa46750 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1158.279027] env[69992]: DEBUG nova.compute.manager [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1158.279027] env[69992]: DEBUG nova.network.neutron [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1158.292110] env[69992]: DEBUG oslo_vmware.api [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897423, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.347414] env[69992]: DEBUG nova.network.neutron [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1158.376908] env[69992]: DEBUG nova.policy [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd0f7a6e9a76342a1a4fd39a8b21a31d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dc6fa4e45f4c47c49d67e6efe2eb7a50', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1158.435442] env[69992]: DEBUG oslo_vmware.api [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897419, 'name': PowerOnVM_Task, 'duration_secs': 0.495068} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.435700] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1158.435896] env[69992]: INFO nova.compute.manager [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Took 4.81 seconds to spawn the instance on the hypervisor. 
[ 1158.436094] env[69992]: DEBUG nova.compute.manager [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1158.436890] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d36bff70-db99-47b9-b08a-54acba9da108 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.548211] env[69992]: DEBUG nova.network.neutron [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Updating instance_info_cache with network_info: [{"id": "b42eb1be-6903-43f9-8796-f03e6defbfce", "address": "fa:16:3e:ec:df:3f", "network": {"id": "30fc2190-9d50-41ef-815f-31d0c520b954", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1488904420-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "85e3c4a655d445658f21b46f360dcfe8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb42eb1be-69", "ovs_interfaceid": "b42eb1be-6903-43f9-8796-f03e6defbfce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.598098] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 068507bb-ee7a-44f7-b315-7d4b2b70e735] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1158.741185] env[69992]: DEBUG nova.network.neutron [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Successfully created port: 48ef557e-b0bc-4415-84c9-60b9146b4ff7 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1158.749168] env[69992]: DEBUG oslo_vmware.api [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Task: {'id': task-2897422, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.369199} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.749168] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1158.749168] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1158.749168] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1158.749168] env[69992]: INFO nova.compute.manager [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1158.749168] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1158.749168] env[69992]: DEBUG nova.compute.manager [-] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1158.749168] env[69992]: DEBUG nova.network.neutron [-] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1158.779356] env[69992]: DEBUG nova.compute.manager [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1158.782076] env[69992]: DEBUG oslo_vmware.api [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897423, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.308278} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.782532] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1158.782721] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1158.782899] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1158.783091] env[69992]: INFO nova.compute.manager [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1158.783338] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1158.783527] env[69992]: DEBUG nova.compute.manager [-] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1158.783620] env[69992]: DEBUG nova.network.neutron [-] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1158.868969] env[69992]: DEBUG nova.network.neutron [-] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.961397] env[69992]: INFO nova.compute.manager [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Took 52.01 seconds to build instance. 
[ 1159.051957] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Releasing lock "refresh_cache-37751af7-267e-4693-aaa3-cd1bb9c3d950" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1159.052315] env[69992]: DEBUG nova.compute.manager [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Instance network_info: |[{"id": "b42eb1be-6903-43f9-8796-f03e6defbfce", "address": "fa:16:3e:ec:df:3f", "network": {"id": "30fc2190-9d50-41ef-815f-31d0c520b954", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1488904420-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "85e3c4a655d445658f21b46f360dcfe8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb42eb1be-69", "ovs_interfaceid": "b42eb1be-6903-43f9-8796-f03e6defbfce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1159.055218] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:df:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '748a5204-8f14-402c-9a6e-f3e6104db082', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b42eb1be-6903-43f9-8796-f03e6defbfce', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1159.063921] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Creating folder: Project (85e3c4a655d445658f21b46f360dcfe8). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1159.067492] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6c4ae97e-659f-4726-afbb-4f96e0ff764a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.082447] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Created folder: Project (85e3c4a655d445658f21b46f360dcfe8) in parent group-v581821. 
[ 1159.082641] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Creating folder: Instances. Parent ref: group-v582022. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1159.082908] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75c385d1-f827-440d-9c66-516d149e2c38 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.098726] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Created folder: Instances in parent group-v582022. [ 1159.099012] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1159.099243] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1159.099458] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-50e50c54-f086-41b2-9732-31356ae1e865 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.120057] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: e934fc79-f7c5-4ca9-9f81-85467c1e9b45] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1159.129061] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1159.129061] env[69992]: value = "task-2897426" [ 1159.129061] env[69992]: _type = "Task" [ 1159.129061] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.140889] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897426, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.351430] env[69992]: DEBUG nova.compute.manager [req-c9f02ef4-736d-432f-910f-ce321b82d245 req-75232a7e-3cea-4300-9cdf-d54c4b01986a service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Received event network-changed-b42eb1be-6903-43f9-8796-f03e6defbfce {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1159.351844] env[69992]: DEBUG nova.compute.manager [req-c9f02ef4-736d-432f-910f-ce321b82d245 req-75232a7e-3cea-4300-9cdf-d54c4b01986a service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Refreshing instance network info cache due to event network-changed-b42eb1be-6903-43f9-8796-f03e6defbfce. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1159.352172] env[69992]: DEBUG oslo_concurrency.lockutils [req-c9f02ef4-736d-432f-910f-ce321b82d245 req-75232a7e-3cea-4300-9cdf-d54c4b01986a service nova] Acquiring lock "refresh_cache-37751af7-267e-4693-aaa3-cd1bb9c3d950" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.352421] env[69992]: DEBUG oslo_concurrency.lockutils [req-c9f02ef4-736d-432f-910f-ce321b82d245 req-75232a7e-3cea-4300-9cdf-d54c4b01986a service nova] Acquired lock "refresh_cache-37751af7-267e-4693-aaa3-cd1bb9c3d950" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1159.352678] env[69992]: DEBUG nova.network.neutron [req-c9f02ef4-736d-432f-910f-ce321b82d245 req-75232a7e-3cea-4300-9cdf-d54c4b01986a service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Refreshing network info cache for port b42eb1be-6903-43f9-8796-f03e6defbfce {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1159.371541] env[69992]: INFO nova.compute.manager [-] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Took 1.43 seconds to deallocate network for instance. [ 1159.439609] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0455652d-1266-43a3-a712-f9f3ebb8363f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.450898] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb9e0c7-baae-41e3-8c18-e5bc12dd9b7f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.484921] env[69992]: DEBUG oslo_concurrency.lockutils [None req-25580f91-fb31-49f8-8f25-c1e519a7fca3 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lock "c1c90aa6-922d-4315-8ead-2263a55a5d6e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.543s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.489020] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1dde47-a1a4-436f-8360-aeda6eb559df {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.496797] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51a3e5b-21c8-45f7-9db6-d794527e74df {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.512734] env[69992]: DEBUG nova.compute.provider_tree [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1159.570744] env[69992]: DEBUG nova.network.neutron [-] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.623203] env[69992]: DEBUG nova.compute.manager [None 
req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 93b78a8b-389c-4114-8c1d-da80146d80f3] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1159.651365] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897426, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.737054] env[69992]: DEBUG nova.compute.manager [req-e5261480-3107-4ac0-9d00-5278bf54deb2 req-72e3f276-5d12-40c6-8f3b-9c640b084877 service nova] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Received event network-vif-deleted-caeee1e6-2d7b-48fe-afa7-7b1525a95c86 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1159.737273] env[69992]: INFO nova.compute.manager [req-e5261480-3107-4ac0-9d00-5278bf54deb2 req-72e3f276-5d12-40c6-8f3b-9c640b084877 service nova] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Neutron deleted interface caeee1e6-2d7b-48fe-afa7-7b1525a95c86; detaching it from the instance and deleting it from the info cache [ 1159.737442] env[69992]: DEBUG nova.network.neutron [req-e5261480-3107-4ac0-9d00-5278bf54deb2 req-72e3f276-5d12-40c6-8f3b-9c640b084877 service nova] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.792085] env[69992]: DEBUG nova.compute.manager [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1159.815038] env[69992]: DEBUG nova.virt.hardware [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1159.815038] env[69992]: DEBUG nova.virt.hardware [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1159.815038] env[69992]: DEBUG nova.virt.hardware [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1159.815038] env[69992]: DEBUG nova.virt.hardware [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1159.815478] env[69992]: DEBUG nova.virt.hardware [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1159.815849] env[69992]: DEBUG nova.virt.hardware [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1159.816807] env[69992]: DEBUG nova.virt.hardware [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1159.817114] env[69992]: DEBUG nova.virt.hardware [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1159.817491] 
env[69992]: DEBUG nova.virt.hardware [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1159.817753] env[69992]: DEBUG nova.virt.hardware [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1159.818060] env[69992]: DEBUG nova.virt.hardware [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1159.819293] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa0977b7-f951-4c86-8ce1-ad26e44169eb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.828532] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d0d278-53f0-4947-95e0-f20ae036db0c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.882457] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.015535] env[69992]: DEBUG nova.scheduler.client.report [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1160.029541] env[69992]: INFO nova.compute.manager [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Rebuilding instance [ 1160.038182] env[69992]: DEBUG nova.network.neutron [req-c9f02ef4-736d-432f-910f-ce321b82d245 req-75232a7e-3cea-4300-9cdf-d54c4b01986a service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Updated VIF entry in instance network info cache for port b42eb1be-6903-43f9-8796-f03e6defbfce. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1160.038527] env[69992]: DEBUG nova.network.neutron [req-c9f02ef4-736d-432f-910f-ce321b82d245 req-75232a7e-3cea-4300-9cdf-d54c4b01986a service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Updating instance_info_cache with network_info: [{"id": "b42eb1be-6903-43f9-8796-f03e6defbfce", "address": "fa:16:3e:ec:df:3f", "network": {"id": "30fc2190-9d50-41ef-815f-31d0c520b954", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1488904420-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "85e3c4a655d445658f21b46f360dcfe8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb42eb1be-69", "ovs_interfaceid": "b42eb1be-6903-43f9-8796-f03e6defbfce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.070649] env[69992]: DEBUG nova.compute.manager [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1160.071780] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4de3211-708b-48a2-ba66-a79f0626d7e0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.074917] env[69992]: INFO nova.compute.manager [-] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Took 1.29 seconds to deallocate network for instance. [ 1160.126881] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: f249c0b9-ddd7-4b63-ae3a-11035764d3e5] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1160.142626] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897426, 'name': CreateVM_Task, 'duration_secs': 0.66726} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.142956] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1160.143670] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.143835] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1160.144176] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1160.144436] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e65984d-cac7-4bd4-be56-c38af7e55a5b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.149664] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Waiting for the task: (returnval){ [ 1160.149664] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52cbd968-6e06-37f3-1dc1-cc1fd452f9f4" [ 1160.149664] env[69992]: _type = "Task" [ 1160.149664] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.158433] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52cbd968-6e06-37f3-1dc1-cc1fd452f9f4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.168621] env[69992]: DEBUG nova.network.neutron [-] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.240808] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-176ac8bf-8529-4e54-966e-4c6774fe8de7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.250970] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cff12331-7239-4c48-bbbd-38170e3f5ffe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.292954] env[69992]: DEBUG nova.compute.manager [req-e5261480-3107-4ac0-9d00-5278bf54deb2 req-72e3f276-5d12-40c6-8f3b-9c640b084877 service nova] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Detach interface failed, port_id=caeee1e6-2d7b-48fe-afa7-7b1525a95c86, reason: Instance 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1160.454263] env[69992]: DEBUG nova.network.neutron [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Successfully updated port: 48ef557e-b0bc-4415-84c9-60b9146b4ff7 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1160.541643] env[69992]: DEBUG oslo_concurrency.lockutils [req-c9f02ef4-736d-432f-910f-ce321b82d245 req-75232a7e-3cea-4300-9cdf-d54c4b01986a service nova] Releasing lock "refresh_cache-37751af7-267e-4693-aaa3-cd1bb9c3d950" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1160.541936] env[69992]: DEBUG nova.compute.manager [req-c9f02ef4-736d-432f-910f-ce321b82d245 req-75232a7e-3cea-4300-9cdf-d54c4b01986a service nova] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Received event network-vif-deleted-e8395fdf-5e7f-47d5-9385-6cb5a2090486 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1160.542254] env[69992]: DEBUG nova.compute.manager [req-c9f02ef4-736d-432f-910f-ce321b82d245 req-75232a7e-3cea-4300-9cdf-d54c4b01986a service nova] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Received event network-vif-deleted-5ac51a75-6c4c-4475-8100-da5a8a5831ff {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1160.542496] env[69992]: INFO nova.compute.manager [req-c9f02ef4-736d-432f-910f-ce321b82d245 req-75232a7e-3cea-4300-9cdf-d54c4b01986a service nova] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Neutron deleted interface 5ac51a75-6c4c-4475-8100-da5a8a5831ff; detaching it from the instance and deleting it from the info cache [ 1160.542707] env[69992]: DEBUG nova.network.neutron [req-c9f02ef4-736d-432f-910f-ce321b82d245 req-75232a7e-3cea-4300-9cdf-d54c4b01986a service nova] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.586789] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 
tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.631273] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: e74441fc-361f-4e0b-bfdd-6f8213db51e3] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1160.663212] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52cbd968-6e06-37f3-1dc1-cc1fd452f9f4, 'name': SearchDatastore_Task, 'duration_secs': 0.009953} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.663516] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1160.663756] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1160.663990] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.664525] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1160.664599] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1160.664819] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4fadf982-449c-45ef-9ae2-dd9776b04382 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.670636] env[69992]: INFO nova.compute.manager [-] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Took 1.92 
seconds to deallocate network for instance. [ 1160.676764] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1160.676831] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1160.677741] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9d0088d-c182-40d6-8870-a2b1fad7b00a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.683723] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Waiting for the task: (returnval){ [ 1160.683723] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]523aa0b1-5228-0d74-7055-e53c07f043c1" [ 1160.683723] env[69992]: _type = "Task" [ 1160.683723] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.691838] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523aa0b1-5228-0d74-7055-e53c07f043c1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.957267] env[69992]: DEBUG oslo_concurrency.lockutils [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.957467] env[69992]: DEBUG oslo_concurrency.lockutils [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1160.957649] env[69992]: DEBUG nova.network.neutron [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1161.026035] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.258s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.032579] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 40.226s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.046019] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-319d964d-ce47-4d97-9a30-c6a9b23c964b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.055331] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-248c9482-75db-4a15-927e-346582099108 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.094212] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1161.094595] env[69992]: DEBUG nova.compute.manager [req-c9f02ef4-736d-432f-910f-ce321b82d245 req-75232a7e-3cea-4300-9cdf-d54c4b01986a service nova] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Detach interface failed, port_id=5ac51a75-6c4c-4475-8100-da5a8a5831ff, reason: Instance bce01d14-3c1b-4dce-b61c-721e25a56497 could not be found. 
{{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1161.095069] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d828721-f5e0-4e17-8825-b164d2ac7b26 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.101891] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1161.101891] env[69992]: value = "task-2897427" [ 1161.101891] env[69992]: _type = "Task" [ 1161.101891] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.110454] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897427, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.134820] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 64ab568c-a2ef-4bac-8885-3dde76f9f764] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1161.178961] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.194778] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523aa0b1-5228-0d74-7055-e53c07f043c1, 'name': SearchDatastore_Task, 'duration_secs': 0.009182} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.195867] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d571b94-fe60-4bb7-924f-17bd2c25d3bb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.201114] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Waiting for the task: (returnval){ [ 1161.201114] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]526c9fad-26ae-20c8-3e37-cd51106b5742" [ 1161.201114] env[69992]: _type = "Task" [ 1161.201114] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.209228] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526c9fad-26ae-20c8-3e37-cd51106b5742, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.490544] env[69992]: DEBUG nova.network.neutron [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1161.540757] env[69992]: INFO nova.compute.claims [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1161.613726] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897427, 'name': PowerOffVM_Task, 'duration_secs': 0.135499} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.613992] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1161.614240] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1161.614975] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c1e3de-3bb6-4d9c-a41f-fc61819e4cc6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.622431] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1161.622601] env[69992]: DEBUG nova.network.neutron [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Updating instance_info_cache with network_info: [{"id": "48ef557e-b0bc-4415-84c9-60b9146b4ff7", "address": "fa:16:3e:6e:ee:46", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": 
{"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48ef557e-b0", "ovs_interfaceid": "48ef557e-b0bc-4415-84c9-60b9146b4ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.623705] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b3cd2295-9bd0-42ac-969f-d629152ee7ef {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.638395] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1161.638641] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Cleaning up deleted instances with incomplete migration {{(pid=69992) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1161.647520] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1161.647665] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1161.647830] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Deleting the datastore file [datastore2] c1c90aa6-922d-4315-8ead-2263a55a5d6e {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1161.648163] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2be4bdab-b2e4-4237-a0dd-4b0fe70a9a49 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.654059] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1161.654059] env[69992]: value = "task-2897429" [ 1161.654059] env[69992]: _type = "Task" [ 1161.654059] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.662122] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897429, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.711854] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526c9fad-26ae-20c8-3e37-cd51106b5742, 'name': SearchDatastore_Task, 'duration_secs': 0.011353} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.712121] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1161.712409] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 37751af7-267e-4693-aaa3-cd1bb9c3d950/37751af7-267e-4693-aaa3-cd1bb9c3d950.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1161.712678] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5f145326-3f89-4a02-afa2-a1ae1b85e61a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.720795] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Waiting for the task: (returnval){ [ 1161.720795] env[69992]: value = "task-2897430" [ 1161.720795] env[69992]: _type = "Task" [ 1161.720795] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.729030] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897430, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.769381] env[69992]: DEBUG nova.compute.manager [req-c842e306-d511-4549-93fd-954528b6f8df req-76954d7f-8f2d-488c-9bc8-5f16f174f103 service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Received event network-vif-plugged-48ef557e-b0bc-4415-84c9-60b9146b4ff7 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1161.771372] env[69992]: DEBUG oslo_concurrency.lockutils [req-c842e306-d511-4549-93fd-954528b6f8df req-76954d7f-8f2d-488c-9bc8-5f16f174f103 service nova] Acquiring lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.771372] env[69992]: DEBUG oslo_concurrency.lockutils [req-c842e306-d511-4549-93fd-954528b6f8df req-76954d7f-8f2d-488c-9bc8-5f16f174f103 service nova] Lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.771372] env[69992]: DEBUG oslo_concurrency.lockutils [req-c842e306-d511-4549-93fd-954528b6f8df req-76954d7f-8f2d-488c-9bc8-5f16f174f103 service nova] Lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.771372] env[69992]: DEBUG nova.compute.manager [req-c842e306-d511-4549-93fd-954528b6f8df req-76954d7f-8f2d-488c-9bc8-5f16f174f103 service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] No waiting events found dispatching network-vif-plugged-48ef557e-b0bc-4415-84c9-60b9146b4ff7 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1161.771372] env[69992]: WARNING nova.compute.manager [req-c842e306-d511-4549-93fd-954528b6f8df req-76954d7f-8f2d-488c-9bc8-5f16f174f103 service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Received unexpected event network-vif-plugged-48ef557e-b0bc-4415-84c9-60b9146b4ff7 for instance with vm_state building and task_state spawning. [ 1161.771372] env[69992]: DEBUG nova.compute.manager [req-c842e306-d511-4549-93fd-954528b6f8df req-76954d7f-8f2d-488c-9bc8-5f16f174f103 service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Received event network-changed-48ef557e-b0bc-4415-84c9-60b9146b4ff7 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1161.771372] env[69992]: DEBUG nova.compute.manager [req-c842e306-d511-4549-93fd-954528b6f8df req-76954d7f-8f2d-488c-9bc8-5f16f174f103 service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Refreshing instance network info cache due to event network-changed-48ef557e-b0bc-4415-84c9-60b9146b4ff7. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1161.771372] env[69992]: DEBUG oslo_concurrency.lockutils [req-c842e306-d511-4549-93fd-954528b6f8df req-76954d7f-8f2d-488c-9bc8-5f16f174f103 service nova] Acquiring lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.048952] env[69992]: INFO nova.compute.resource_tracker [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Updating resource usage from migration a5252a08-401e-4a46-9c0d-2521390462d4 [ 1162.127020] env[69992]: DEBUG oslo_concurrency.lockutils [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1162.127020] env[69992]: DEBUG nova.compute.manager [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Instance network_info: |[{"id": "48ef557e-b0bc-4415-84c9-60b9146b4ff7", "address": "fa:16:3e:6e:ee:46", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48ef557e-b0", "ovs_interfaceid": "48ef557e-b0bc-4415-84c9-60b9146b4ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1162.127992] env[69992]: DEBUG oslo_concurrency.lockutils [req-c842e306-d511-4549-93fd-954528b6f8df req-76954d7f-8f2d-488c-9bc8-5f16f174f103 service nova] Acquired lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1162.127992] env[69992]: DEBUG nova.network.neutron [req-c842e306-d511-4549-93fd-954528b6f8df req-76954d7f-8f2d-488c-9bc8-5f16f174f103 service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Refreshing network info cache for port 48ef557e-b0bc-4415-84c9-60b9146b4ff7 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1162.128845] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 
tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:ee:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ead20342-9afa-435e-a22b-b4a903457712', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '48ef557e-b0bc-4415-84c9-60b9146b4ff7', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1162.136642] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1162.140022] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1162.140022] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8a06b2c0-4304-497b-a725-68d41ef52ca4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.156321] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1162.163781] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1162.163781] env[69992]: value = "task-2897431" [ 1162.163781] env[69992]: _type = "Task" [ 1162.163781] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.170565] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897429, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103934} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.172033] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1162.172033] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1162.172033] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1162.180184] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897431, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.230831] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897430, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480113} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.233662] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 37751af7-267e-4693-aaa3-cd1bb9c3d950/37751af7-267e-4693-aaa3-cd1bb9c3d950.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1162.233911] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1162.234672] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f0cc6021-8c8b-4607-8bce-ca9ecd20ce62 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.242284] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Waiting for the task: (returnval){ [ 1162.242284] env[69992]: value = "task-2897432" [ 1162.242284] env[69992]: _type = "Task" [ 1162.242284] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.253441] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897432, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.584527] env[69992]: INFO nova.compute.manager [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Swapping old allocation on dict_keys(['9dc5dd7f-a3af-48a9-a04e-f6c1d333da28']) held by migration 53d4b0cb-ec86-417c-87f6-76638a7b3c0b for instance [ 1162.594711] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e321c1-d16d-4cce-85da-7b3fd5e21a0d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.604382] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a49eaf2-c16a-44ac-8570-61366f349668 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.608350] env[69992]: DEBUG nova.scheduler.client.report [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Overwriting current allocation {'allocations': {'9dc5dd7f-a3af-48a9-a04e-f6c1d333da28': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 109}}, 'project_id': '93d4e973e49e4cf98096fa30ded68db1', 'user_id': '1b2730128c5e487ea5d9b5b0ae9313ae', 'consumer_generation': 1} on consumer 0e8163d9-6ff5-4f1e-af33-ccb42fa46750 {{(pid=69992) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1162.638999] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cdc634d-ba2c-4715-a765-d459d78ed983 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.647419] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d355f9e8-2b9b-4075-8e74-966a663d4481 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.662146] env[69992]: DEBUG nova.compute.provider_tree [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1162.673111] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897431, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.742897] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "refresh_cache-0e8163d9-6ff5-4f1e-af33-ccb42fa46750" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.743066] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquired lock "refresh_cache-0e8163d9-6ff5-4f1e-af33-ccb42fa46750" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1162.743208] env[69992]: DEBUG nova.network.neutron [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1162.754687] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897432, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069264} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.754944] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1162.755753] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a1bb637-71c7-46dc-8b6c-2a00cf53c703 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.781942] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 37751af7-267e-4693-aaa3-cd1bb9c3d950/37751af7-267e-4693-aaa3-cd1bb9c3d950.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1162.784737] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c101a5a0-62b2-43c8-bb0d-c2793b4ffedf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.804496] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Waiting for the task: (returnval){ [ 1162.804496] env[69992]: value = "task-2897433" [ 1162.804496] env[69992]: _type = "Task" [ 1162.804496] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.813442] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897433, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.923666] env[69992]: DEBUG nova.network.neutron [req-c842e306-d511-4549-93fd-954528b6f8df req-76954d7f-8f2d-488c-9bc8-5f16f174f103 service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Updated VIF entry in instance network info cache for port 48ef557e-b0bc-4415-84c9-60b9146b4ff7. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1162.924078] env[69992]: DEBUG nova.network.neutron [req-c842e306-d511-4549-93fd-954528b6f8df req-76954d7f-8f2d-488c-9bc8-5f16f174f103 service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Updating instance_info_cache with network_info: [{"id": "48ef557e-b0bc-4415-84c9-60b9146b4ff7", "address": "fa:16:3e:6e:ee:46", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48ef557e-b0", "ovs_interfaceid": "48ef557e-b0bc-4415-84c9-60b9146b4ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.168834] env[69992]: DEBUG nova.scheduler.client.report [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1163.177833] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897431, 'name': CreateVM_Task, 'duration_secs': 0.708256} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.177980] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1163.178653] env[69992]: DEBUG oslo_concurrency.lockutils [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.179100] env[69992]: DEBUG oslo_concurrency.lockutils [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.179529] env[69992]: DEBUG oslo_concurrency.lockutils [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1163.179791] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-801c66d6-14d1-4bca-9290-75c75388bd63 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.187032] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1163.187032] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52637933-b303-3384-3691-77b7804cc72e" [ 1163.187032] env[69992]: _type = "Task" [ 1163.187032] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.195155] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52637933-b303-3384-3691-77b7804cc72e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.212888] env[69992]: DEBUG nova.virt.hardware [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1163.213137] env[69992]: DEBUG nova.virt.hardware [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1163.213302] env[69992]: DEBUG nova.virt.hardware [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1163.213486] env[69992]: DEBUG nova.virt.hardware [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1163.213635] env[69992]: DEBUG nova.virt.hardware [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1163.213784] env[69992]: DEBUG nova.virt.hardware [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1163.213993] env[69992]: DEBUG nova.virt.hardware [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1163.214168] env[69992]: DEBUG nova.virt.hardware [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1163.214340] env[69992]: DEBUG nova.virt.hardware [None 
req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1163.214499] env[69992]: DEBUG nova.virt.hardware [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1163.214667] env[69992]: DEBUG nova.virt.hardware [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1163.215548] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df63fe5-c6fd-45c9-8606-148293611cf1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.223711] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2781c30f-73e7-430c-9d3b-4d28b8dcf18c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.236857] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Instance VIF info [] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1163.242467] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1163.242727] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1163.242935] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3908718a-7a5e-4a51-86ac-d42825135d01 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.263061] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1163.263061] env[69992]: value = "task-2897434" [ 1163.263061] env[69992]: _type = "Task" [ 1163.263061] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.270582] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897434, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.314861] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897433, 'name': ReconfigVM_Task, 'duration_secs': 0.269679} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.315155] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 37751af7-267e-4693-aaa3-cd1bb9c3d950/37751af7-267e-4693-aaa3-cd1bb9c3d950.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1163.316866] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-20a77953-0c52-4951-a17e-5c95fea7cc5a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.322472] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Waiting for the task: (returnval){ [ 1163.322472] env[69992]: value = "task-2897435" [ 1163.322472] env[69992]: _type = "Task" [ 1163.322472] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.330760] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897435, 'name': Rename_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.426631] env[69992]: DEBUG oslo_concurrency.lockutils [req-c842e306-d511-4549-93fd-954528b6f8df req-76954d7f-8f2d-488c-9bc8-5f16f174f103 service nova] Releasing lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1163.496060] env[69992]: DEBUG nova.network.neutron [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Updating instance_info_cache with network_info: [{"id": "47e06987-ed7c-4f19-8716-20716e1056c3", "address": "fa:16:3e:7b:f9:7a", "network": {"id": "3552ce43-8c78-4ad6-8a43-eb80cd2fafc8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "8737009d8272416b9d9df3315d20a145", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47e06987-ed", "ovs_interfaceid": "47e06987-ed7c-4f19-8716-20716e1056c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.674421] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.642s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.674686] env[69992]: INFO nova.compute.manager [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Migrating [ 1163.682122] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.156s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.682338] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.684376] env[69992]: DEBUG oslo_concurrency.lockutils 
[None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.913s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.685949] env[69992]: INFO nova.compute.claims [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1163.706158] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52637933-b303-3384-3691-77b7804cc72e, 'name': SearchDatastore_Task, 'duration_secs': 0.010496} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.706497] env[69992]: DEBUG oslo_concurrency.lockutils [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1163.706726] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1163.706998] env[69992]: DEBUG oslo_concurrency.lockutils [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.707180] env[69992]: DEBUG oslo_concurrency.lockutils [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.707393] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1163.707676] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e9d11bf7-08be-443c-b323-c246c8897e2c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.718082] env[69992]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1163.718082] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1163.719598] env[69992]: INFO nova.scheduler.client.report [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Deleted allocations for instance 7932a42f-6a62-4c2c-be9a-3cb518fe4183 [ 1163.723246] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3fe4a57-a6f8-48b2-be8d-27cbbb6e1d5e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.738173] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1163.738173] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]526ff206-5889-2840-d33f-f2f683c394b1" [ 1163.738173] env[69992]: _type = "Task" [ 1163.738173] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.747895] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526ff206-5889-2840-d33f-f2f683c394b1, 'name': SearchDatastore_Task, 'duration_secs': 0.009346} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.748823] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b19e084-c1be-47b9-8e17-23fa44d9d8e7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.754306] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1163.754306] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]521090b9-37b3-0a33-dba0-b130f8c0b909" [ 1163.754306] env[69992]: _type = "Task" [ 1163.754306] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.764015] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521090b9-37b3-0a33-dba0-b130f8c0b909, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.771801] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897434, 'name': CreateVM_Task, 'duration_secs': 0.406635} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.772448] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1163.773180] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.773352] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.773669] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1163.774350] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b477d42-3cb4-4cbc-8ada-f03d12ce1821 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.778592] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1163.778592] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52ec7aaf-b2b1-29d5-9cc0-221ca16f5c05" [ 1163.778592] env[69992]: _type = "Task" [ 1163.778592] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.786360] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ec7aaf-b2b1-29d5-9cc0-221ca16f5c05, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.832313] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897435, 'name': Rename_Task, 'duration_secs': 0.144801} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.832602] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1163.832846] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7262a497-347c-49b0-b5f3-8c6b02f99f5e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.839280] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Waiting for the task: (returnval){ [ 1163.839280] env[69992]: value = "task-2897436" [ 1163.839280] env[69992]: _type = "Task" [ 1163.839280] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.848651] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897436, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.000058] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Releasing lock "refresh_cache-0e8163d9-6ff5-4f1e-af33-ccb42fa46750" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.000236] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1164.000584] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98f31609-62e3-4da1-8cde-d4de1a529f40 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.009382] env[69992]: DEBUG oslo_vmware.api [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1164.009382] env[69992]: value = "task-2897437" [ 1164.009382] env[69992]: _type = "Task" [ 1164.009382] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.017521] env[69992]: DEBUG oslo_vmware.api [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897437, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.201097] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "refresh_cache-9df7b187-e579-41b0-9d24-be2a1ae93079" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1164.201271] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "refresh_cache-9df7b187-e579-41b0-9d24-be2a1ae93079" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1164.201505] env[69992]: DEBUG nova.network.neutron [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1164.235370] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6553ebac-4cc8-4f81-8009-2578fb01da77 tempest-ServersAdmin275Test-1945779895 tempest-ServersAdmin275Test-1945779895-project-member] Lock "7932a42f-6a62-4c2c-be9a-3cb518fe4183" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.952s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.266545] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521090b9-37b3-0a33-dba0-b130f8c0b909, 'name': SearchDatastore_Task, 'duration_secs': 0.009992} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.266812] env[69992]: DEBUG oslo_concurrency.lockutils [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.267399] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 5f98a2aa-eb7b-41d2-9e9f-14cee9445942/5f98a2aa-eb7b-41d2-9e9f-14cee9445942.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1164.267399] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-160b172b-f6a3-437a-8b8e-0869714f48d6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.275957] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1164.275957] env[69992]: value = "task-2897438" [ 1164.275957] env[69992]: _type = "Task" [ 1164.275957] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.294888] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897438, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.301493] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ec7aaf-b2b1-29d5-9cc0-221ca16f5c05, 'name': SearchDatastore_Task, 'duration_secs': 0.008994} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.301952] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.302459] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1164.302741] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1164.303019] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1164.303383] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1164.303812] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39b9593d-0261-4ba4-bf53-eb5cb519fb6c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.315628] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1164.315816] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1164.317157] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85675bd1-fc35-4cd3-ac7a-18d9dee04430 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.323383] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1164.323383] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a5da11-3254-8b1a-41af-41ce0c991784" [ 1164.323383] env[69992]: _type = "Task" [ 1164.323383] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.334853] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a5da11-3254-8b1a-41af-41ce0c991784, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.349105] env[69992]: DEBUG oslo_vmware.api [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897436, 'name': PowerOnVM_Task, 'duration_secs': 0.5001} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.349372] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1164.349577] env[69992]: INFO nova.compute.manager [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Took 8.17 seconds to spawn the instance on the hypervisor. [ 1164.349751] env[69992]: DEBUG nova.compute.manager [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1164.351191] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2994c022-65cc-47e1-a991-1128f459fb6b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.520350] env[69992]: DEBUG oslo_vmware.api [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897437, 'name': PowerOffVM_Task, 'duration_secs': 0.253748} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.520667] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1164.521463] env[69992]: DEBUG nova.virt.hardware [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:47:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='b7d7a3d8-e1c4-4412-993d-af11150bffcc',id=35,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-318102732',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1164.521706] env[69992]: DEBUG nova.virt.hardware [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1164.521867] env[69992]: DEBUG nova.virt.hardware [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1164.522066] env[69992]: DEBUG nova.virt.hardware [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1164.522222] env[69992]: DEBUG nova.virt.hardware [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1164.522371] env[69992]: DEBUG nova.virt.hardware [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1164.522578] env[69992]: DEBUG nova.virt.hardware [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1164.522737] env[69992]: DEBUG nova.virt.hardware [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 
tempest-MigrationsAdminTest-2064433656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1164.522906] env[69992]: DEBUG nova.virt.hardware [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1164.523116] env[69992]: DEBUG nova.virt.hardware [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1164.523305] env[69992]: DEBUG nova.virt.hardware [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1164.528676] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0276631-3636-480d-bf1a-91cf08f84e11 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.551897] env[69992]: DEBUG oslo_vmware.api [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1164.551897] env[69992]: value = "task-2897439" [ 1164.551897] env[69992]: _type = "Task" [ 1164.551897] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.561148] env[69992]: DEBUG oslo_vmware.api [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897439, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.787463] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897438, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.841155] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a5da11-3254-8b1a-41af-41ce0c991784, 'name': SearchDatastore_Task, 'duration_secs': 0.011462} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.847844] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc283cec-3f10-4294-8fe3-4fdf5930ec87 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.858023] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1164.858023] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52e07b6e-772e-3d1a-8d2f-7087681513e7" [ 1164.858023] env[69992]: _type = "Task" [ 1164.858023] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.872433] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e07b6e-772e-3d1a-8d2f-7087681513e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.878606] env[69992]: INFO nova.compute.manager [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Took 51.05 seconds to build instance. [ 1164.994288] env[69992]: DEBUG nova.network.neutron [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Updating instance_info_cache with network_info: [{"id": "fd0c5f07-29de-4e64-a60c-655c3da4bb9e", "address": "fa:16:3e:8b:b7:f0", "network": {"id": "0655b1a1-e1ee-4efd-889c-0f72915310f5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1445509008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51b8195c4e7418cbdaa66aa5e5aff5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd0c5f07-29", "ovs_interfaceid": "fd0c5f07-29de-4e64-a60c-655c3da4bb9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1165.066378] env[69992]: DEBUG oslo_vmware.api [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897439, 'name': ReconfigVM_Task, 'duration_secs': 0.30987} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.067265] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b56ef4-b2bb-4c11-9ff1-60deae2a0c1e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.090059] env[69992]: DEBUG nova.virt.hardware [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:47:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='b7d7a3d8-e1c4-4412-993d-af11150bffcc',id=35,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-318102732',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1165.090330] env[69992]: DEBUG nova.virt.hardware [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1165.090492] env[69992]: DEBUG nova.virt.hardware [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1165.090676] env[69992]: DEBUG nova.virt.hardware [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1165.090870] env[69992]: DEBUG nova.virt.hardware [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1165.090966] env[69992]: DEBUG nova.virt.hardware [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1165.091182] env[69992]: DEBUG nova.virt.hardware [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1165.091341] env[69992]: DEBUG nova.virt.hardware [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1165.091504] env[69992]: DEBUG nova.virt.hardware [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1165.091665] env[69992]: DEBUG nova.virt.hardware [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1165.091837] env[69992]: DEBUG nova.virt.hardware [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1165.095737] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e74af03e-d97c-4308-90ce-8625893df944 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.102197] env[69992]: DEBUG oslo_vmware.api [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1165.102197] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]521ac1be-c0b1-5b34-8f59-1d6e0dfeed21" [ 1165.102197] env[69992]: _type = "Task" [ 1165.102197] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.111064] env[69992]: DEBUG oslo_vmware.api [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521ac1be-c0b1-5b34-8f59-1d6e0dfeed21, 'name': SearchDatastore_Task, 'duration_secs': 0.006612} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.116259] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Reconfiguring VM instance instance-00000032 to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1165.119143] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8a987b7-8d24-406a-a3ae-5080a7be8e91 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.141660] env[69992]: DEBUG oslo_vmware.api [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1165.141660] env[69992]: value = "task-2897440" [ 1165.141660] env[69992]: _type = "Task" [ 1165.141660] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.150815] env[69992]: DEBUG oslo_vmware.api [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897440, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.297851] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897438, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.774297} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.298192] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 5f98a2aa-eb7b-41d2-9e9f-14cee9445942/5f98a2aa-eb7b-41d2-9e9f-14cee9445942.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1165.298443] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1165.299171] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-352db949-752a-44ab-8d0d-8e73a41a7039 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.306304] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1165.306304] env[69992]: value = "task-2897441" [ 1165.306304] env[69992]: _type = "Task" [ 1165.306304] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.315257] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897441, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.357081] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c8019f-03b8-444b-aa53-94460c28f42d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.369647] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e07b6e-772e-3d1a-8d2f-7087681513e7, 'name': SearchDatastore_Task, 'duration_secs': 0.056751} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.372123] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1165.372200] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] c1c90aa6-922d-4315-8ead-2263a55a5d6e/c1c90aa6-922d-4315-8ead-2263a55a5d6e.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1165.372537] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1f519c5a-83c3-44c0-8129-1f9c20e7c223 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.375359] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9a408a-2ed0-4900-b215-ecd7bcecbded {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.408370] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4f7f4e5e-7f8d-4faa-a99e-2785dbd983be tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Lock "37751af7-267e-4693-aaa3-cd1bb9c3d950" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.590s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.411133] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416b5ef9-fdc3-43a9-982d-55fe4d8c5d3a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.415531] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1165.415531] env[69992]: value = "task-2897442" [ 1165.415531] env[69992]: _type = "Task" [ 1165.415531] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.423880] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-752a503b-5e84-416e-8a1f-310116def4da {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.432829] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897442, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.442409] env[69992]: DEBUG nova.compute.provider_tree [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1165.497288] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "refresh_cache-9df7b187-e579-41b0-9d24-be2a1ae93079" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1165.522827] env[69992]: INFO nova.compute.manager [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Rescuing [ 1165.523421] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Acquiring lock "refresh_cache-37751af7-267e-4693-aaa3-cd1bb9c3d950" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1165.523673] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Acquired lock "refresh_cache-37751af7-267e-4693-aaa3-cd1bb9c3d950" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1165.523955] env[69992]: DEBUG nova.network.neutron [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1165.653047] env[69992]: DEBUG oslo_vmware.api [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897440, 'name': ReconfigVM_Task, 'duration_secs': 0.333569} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.654029] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Reconfigured VM instance instance-00000032 to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1165.654299] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c470e9-f540-4a36-9224-09c56160f864 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.679229] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] 0e8163d9-6ff5-4f1e-af33-ccb42fa46750/0e8163d9-6ff5-4f1e-af33-ccb42fa46750.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1165.679611] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97fdd9cf-b0ab-4e0e-8024-0fd8c9f8fca1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.699245] env[69992]: DEBUG oslo_vmware.api [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1165.699245] env[69992]: value = "task-2897443" [ 1165.699245] env[69992]: _type = "Task" [ 1165.699245] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.708368] env[69992]: DEBUG oslo_vmware.api [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897443, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.816920] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897441, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072267} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.816920] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1165.816920] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36e2e2e-d422-4a5b-a4f5-55282ee7f23c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.848587] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] 5f98a2aa-eb7b-41d2-9e9f-14cee9445942/5f98a2aa-eb7b-41d2-9e9f-14cee9445942.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1165.848907] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f335b00-fa57-4efa-8826-31952cb2563b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.869896] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1165.869896] env[69992]: value = "task-2897444" [ 1165.869896] env[69992]: _type = "Task" [ 1165.869896] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.879822] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897444, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.925561] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897442, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.948022] env[69992]: DEBUG nova.scheduler.client.report [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1166.209481] env[69992]: DEBUG oslo_vmware.api [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897443, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.254672] env[69992]: DEBUG nova.network.neutron [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Updating instance_info_cache with network_info: [{"id": "b42eb1be-6903-43f9-8796-f03e6defbfce", "address": "fa:16:3e:ec:df:3f", "network": {"id": "30fc2190-9d50-41ef-815f-31d0c520b954", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1488904420-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "85e3c4a655d445658f21b46f360dcfe8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb42eb1be-69", "ovs_interfaceid": "b42eb1be-6903-43f9-8796-f03e6defbfce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1166.380188] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897444, 'name': ReconfigVM_Task, 'duration_secs': 0.299867} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.380464] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Reconfigured VM instance instance-00000046 to attach disk [datastore1] 5f98a2aa-eb7b-41d2-9e9f-14cee9445942/5f98a2aa-eb7b-41d2-9e9f-14cee9445942.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1166.381169] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-88095a6e-3929-4f34-8986-e9276629d2f9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.387596] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1166.387596] env[69992]: value = "task-2897445" [ 1166.387596] env[69992]: _type = "Task" [ 1166.387596] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.396442] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897445, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.425953] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897442, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.563329} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.426239] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] c1c90aa6-922d-4315-8ead-2263a55a5d6e/c1c90aa6-922d-4315-8ead-2263a55a5d6e.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1166.426468] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1166.426749] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f5b4abe2-4cb3-4108-851f-1e8c4f9fee47 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.433527] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1166.433527] env[69992]: value = "task-2897446" [ 1166.433527] env[69992]: _type = "Task" [ 1166.433527] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.440982] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897446, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.458227] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.774s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.458824] env[69992]: DEBUG nova.compute.manager [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1166.461650] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.232s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.461866] env[69992]: DEBUG nova.objects.instance [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lazy-loading 'resources' on Instance uuid f64108ec-c3b2-4b11-9085-2c56b0de93f5 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1166.716343] env[69992]: DEBUG oslo_vmware.api [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897443, 'name': ReconfigVM_Task, 'duration_secs': 0.673657} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.716598] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Reconfigured VM instance instance-00000032 to attach disk [datastore2] 0e8163d9-6ff5-4f1e-af33-ccb42fa46750/0e8163d9-6ff5-4f1e-af33-ccb42fa46750.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1166.717444] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d508e3-bf17-4329-ac2a-e19aa635c97e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.739830] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82830a42-0ed1-414e-b686-eb0f02747941 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.759901] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Releasing lock "refresh_cache-37751af7-267e-4693-aaa3-cd1bb9c3d950" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1166.763430] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f59b0168-bdec-403d-bce2-049a204ef8bc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.789111] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45cc5f30-f233-4cf3-9d24-d8707d83779b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.796818] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Powering on the VM {{(pid=69992) 
power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1166.797083] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a0cdb39b-88e5-43e1-949e-993e1a7a797f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.808482] env[69992]: DEBUG oslo_vmware.api [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1166.808482] env[69992]: value = "task-2897447" [ 1166.808482] env[69992]: _type = "Task" [ 1166.808482] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.817286] env[69992]: DEBUG oslo_vmware.api [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897447, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.899723] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897445, 'name': Rename_Task, 'duration_secs': 0.146539} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.900134] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1166.900445] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ae1ac17-c1f1-4ede-b3b7-1b385a3ded84 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.905999] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1166.905999] env[69992]: value = "task-2897448" [ 1166.905999] env[69992]: _type = "Task" [ 1166.905999] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.915969] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897448, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.943603] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897446, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081847} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.944288] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1166.945098] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-528097ef-8fc7-4238-8101-d11d5460406d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.965071] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] c1c90aa6-922d-4315-8ead-2263a55a5d6e/c1c90aa6-922d-4315-8ead-2263a55a5d6e.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1166.969036] env[69992]: DEBUG nova.compute.utils [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1166.970463] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7ea4033-9294-46d1-8495-8da6ffa850b6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.987351] env[69992]: DEBUG nova.compute.manager [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1166.987554] env[69992]: DEBUG nova.network.neutron [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1166.996949] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1166.996949] env[69992]: value = "task-2897449" [ 1166.996949] env[69992]: _type = "Task" [ 1166.996949] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.008912] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897449, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.014918] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1575500-83f5-473b-beb9-2e13705dbbc0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.038264] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Updating instance '9df7b187-e579-41b0-9d24-be2a1ae93079' progress to 0 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1167.044885] env[69992]: DEBUG nova.policy [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cdc7f71c9c4b4d40bf40b631c24b5ee6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '17ab89c6cf054418a4dd1a0e61b3a5e8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1167.322319] env[69992]: DEBUG oslo_vmware.api [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897447, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.385196] env[69992]: DEBUG nova.network.neutron [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Successfully created port: ebc337ca-1f7f-449a-85a1-1af599dd4a19 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1167.417554] env[69992]: DEBUG oslo_vmware.api [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897448, 'name': PowerOnVM_Task, 'duration_secs': 0.494496} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.420831] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1167.421120] env[69992]: INFO nova.compute.manager [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Took 7.63 seconds to spawn the instance on the hypervisor. 
[ 1167.421319] env[69992]: DEBUG nova.compute.manager [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1167.422412] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16764ff8-d9c3-40c4-971a-5da9345036c1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.487354] env[69992]: DEBUG nova.compute.manager [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1167.509242] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897449, 'name': ReconfigVM_Task, 'duration_secs': 0.331451} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.511464] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Reconfigured VM instance instance-00000044 to attach disk [datastore1] c1c90aa6-922d-4315-8ead-2263a55a5d6e/c1c90aa6-922d-4315-8ead-2263a55a5d6e.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1167.512302] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c39ad178-ecac-47d0-aff5-2a72efdcfc0d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.519330] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1167.519330] env[69992]: value = "task-2897450" [ 1167.519330] env[69992]: _type = "Task" [ 1167.519330] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.528967] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897450, 'name': Rename_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.552128] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1167.552572] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9bae3137-3af4-4f03-8cbe-7cdebdd19ac5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.563045] env[69992]: DEBUG oslo_vmware.api [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1167.563045] env[69992]: value = "task-2897451" [ 1167.563045] env[69992]: _type = "Task" [ 1167.563045] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.571909] env[69992]: DEBUG oslo_vmware.api [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897451, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.621321] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a8dd80-6e06-43fd-8f53-90c48e4d7f26 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.631236] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-561916b4-2ea0-4407-9004-041ef2afa739 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.666621] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff393a0d-0ccd-47e9-83e8-9cfb9de82a42 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.676120] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17e5c50-a318-48d0-9265-fbfe4396f9b4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.689358] env[69992]: DEBUG nova.compute.provider_tree [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1167.826593] env[69992]: DEBUG oslo_vmware.api [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897447, 'name': PowerOnVM_Task, 'duration_secs': 0.667294} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.828088] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1167.944928] env[69992]: INFO nova.compute.manager [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Took 48.26 seconds to build instance. [ 1168.029275] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897450, 'name': Rename_Task, 'duration_secs': 0.232219} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.029561] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1168.029824] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b2e12e7-6f2e-437b-b3de-68aa3d2bb2b0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.036544] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1168.036544] env[69992]: value = "task-2897452" [ 1168.036544] env[69992]: _type = "Task" [ 1168.036544] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.044436] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897452, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.073644] env[69992]: DEBUG oslo_vmware.api [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897451, 'name': PowerOffVM_Task, 'duration_secs': 0.337801} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.073961] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1168.074171] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Updating instance '9df7b187-e579-41b0-9d24-be2a1ae93079' progress to 17 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1168.194162] env[69992]: DEBUG nova.scheduler.client.report [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1168.312721] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1168.313120] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f123b5a6-7304-41d6-ae6f-8e16fcc1a52f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.323911] env[69992]: DEBUG oslo_vmware.api [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Waiting for the task: (returnval){ [ 1168.323911] env[69992]: value = "task-2897453" [ 1168.323911] env[69992]: _type = "Task" [ 1168.323911] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.339844] env[69992]: DEBUG oslo_vmware.api [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897453, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.446671] env[69992]: DEBUG oslo_concurrency.lockutils [None req-138d1163-c218-44a6-b086-d6f96d93de98 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.777s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.499722] env[69992]: DEBUG nova.compute.manager [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1168.549987] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897452, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.556604] env[69992]: DEBUG nova.virt.hardware [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1168.556924] env[69992]: DEBUG nova.virt.hardware [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1168.556990] env[69992]: DEBUG nova.virt.hardware [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1168.560526] env[69992]: DEBUG nova.virt.hardware [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1168.560526] env[69992]: DEBUG nova.virt.hardware [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Image pref 0:0:0 
{{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1168.560706] env[69992]: DEBUG nova.virt.hardware [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1168.561273] env[69992]: DEBUG nova.virt.hardware [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1168.561273] env[69992]: DEBUG nova.virt.hardware [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1168.561541] env[69992]: DEBUG nova.virt.hardware [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1168.561740] env[69992]: DEBUG nova.virt.hardware [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1168.562057] env[69992]: DEBUG nova.virt.hardware [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1168.562851] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33527ca-2ce1-433c-9630-7e3888fdd0e7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.579120] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ee88f5-77da-4cbe-8b12-d9982620fd6c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.588346] env[69992]: DEBUG nova.virt.hardware [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1168.588584] env[69992]: DEBUG nova.virt.hardware [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1168.588752] env[69992]: DEBUG nova.virt.hardware [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1168.588929] env[69992]: DEBUG nova.virt.hardware [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1168.589094] env[69992]: DEBUG nova.virt.hardware [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1168.589250] env[69992]: DEBUG nova.virt.hardware [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1168.590098] env[69992]: DEBUG nova.virt.hardware [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1168.590098] env[69992]: DEBUG nova.virt.hardware [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1168.590098] env[69992]: DEBUG nova.virt.hardware [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1168.590098] env[69992]: DEBUG nova.virt.hardware [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1168.590302] env[69992]: DEBUG nova.virt.hardware [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 
tempest-ServerDiskConfigTestJSON-649865869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1168.595232] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab5b8b89-f094-4d8f-be7a-e60d5d69ed69 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.617666] env[69992]: DEBUG oslo_vmware.api [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1168.617666] env[69992]: value = "task-2897454" [ 1168.617666] env[69992]: _type = "Task" [ 1168.617666] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.629465] env[69992]: DEBUG oslo_vmware.api [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897454, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.697254] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.235s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.699642] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.167s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.699835] env[69992]: DEBUG nova.objects.instance [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69992) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1168.734159] env[69992]: INFO nova.scheduler.client.report [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Deleted allocations for instance f64108ec-c3b2-4b11-9085-2c56b0de93f5 [ 1168.840237] env[69992]: DEBUG oslo_vmware.api [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897453, 'name': PowerOffVM_Task, 'duration_secs': 0.244065} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.840237] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1168.841495] env[69992]: INFO nova.compute.manager [None req-1a8fdf66-1111-44d9-bb6b-00c0080e6197 tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Updating instance to original state: 'active' [ 1168.844956] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e6a05c-26b9-49e6-b23b-e5a138e357e4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.852677] env[69992]: DEBUG nova.compute.manager [req-e5860196-8f67-4764-b746-5e2611c05ef4 req-5875c2fe-670a-4dc1-a71c-297002008e7d service nova] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Received event network-vif-plugged-ebc337ca-1f7f-449a-85a1-1af599dd4a19 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1168.852677] env[69992]: DEBUG oslo_concurrency.lockutils [req-e5860196-8f67-4764-b746-5e2611c05ef4 req-5875c2fe-670a-4dc1-a71c-297002008e7d service nova] Acquiring lock "88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.852677] env[69992]: DEBUG oslo_concurrency.lockutils [req-e5860196-8f67-4764-b746-5e2611c05ef4 req-5875c2fe-670a-4dc1-a71c-297002008e7d service nova] Lock "88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.852677] env[69992]: DEBUG oslo_concurrency.lockutils [req-e5860196-8f67-4764-b746-5e2611c05ef4 req-5875c2fe-670a-4dc1-a71c-297002008e7d service nova] Lock "88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.852677] env[69992]: DEBUG nova.compute.manager [req-e5860196-8f67-4764-b746-5e2611c05ef4 req-5875c2fe-670a-4dc1-a71c-297002008e7d service nova] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] No waiting events found dispatching network-vif-plugged-ebc337ca-1f7f-449a-85a1-1af599dd4a19 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1168.852677] env[69992]: WARNING nova.compute.manager [req-e5860196-8f67-4764-b746-5e2611c05ef4 req-5875c2fe-670a-4dc1-a71c-297002008e7d service nova] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Received unexpected event network-vif-plugged-ebc337ca-1f7f-449a-85a1-1af599dd4a19 for instance with vm_state building and task_state spawning. 
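The nova.virt.hardware trace above shows the CPU topology negotiation for a 1-vCPU flavor with no explicit constraints: flavor and image limits of 0:0:0 fall back to the 65536 maximums, and the only topology that exactly covers one vCPU is 1 socket x 1 core x 1 thread, which is why the "possible" and "sorted desired" lists both contain a single VirtCPUTopology(cores=1,sockets=1,threads=1). The following is a hedged, simplified sketch of that enumeration step, written for illustration only; it is not the actual nova.virt.hardware implementation, and the helper names are invented.

# Illustrative sketch of the topology enumeration traced above
# ("Build topologies for 1 vcpu(s) 1:1:1" -> "[VirtCPUTopology(cores=1,sockets=1,threads=1)]").
# Not the real nova.virt.hardware code; names below are assumptions.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every sockets*cores*threads combination that exactly covers vcpus."""
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield VirtCPUTopology(s, c, t)

print(list(possible_topologies(1)))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]

For a single vCPU the search space collapses immediately, which matches the "Got 1 possible topologies" line in the trace.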
[ 1168.867848] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b65142a-aa65-41a3-80f8-6d5582263d5a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.908549] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1168.909353] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2fe0be84-c312-4764-b74a-5ca4cbdaf086 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.917017] env[69992]: DEBUG oslo_vmware.api [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Waiting for the task: (returnval){ [ 1168.917017] env[69992]: value = "task-2897455" [ 1168.917017] env[69992]: _type = "Task" [ 1168.917017] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.927766] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] VM already powered off {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1168.927766] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1168.927766] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.927766] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1168.927965] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1168.928177] env[69992]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-956e9313-2581-4e35-85e7-301824790ee8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.938467] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1168.938667] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1168.939409] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bf6e0e1-9040-43cf-9a27-c592d8ce3974 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.945383] env[69992]: DEBUG oslo_vmware.api [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Waiting for the task: (returnval){ [ 1168.945383] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52ce0ae9-c10b-50eb-9528-5dfe235f5c72" [ 1168.945383] env[69992]: _type = "Task" [ 1168.945383] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.953155] env[69992]: DEBUG oslo_vmware.api [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ce0ae9-c10b-50eb-9528-5dfe235f5c72, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.973400] env[69992]: DEBUG nova.network.neutron [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Successfully updated port: ebc337ca-1f7f-449a-85a1-1af599dd4a19 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1169.051799] env[69992]: DEBUG oslo_vmware.api [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897452, 'name': PowerOnVM_Task, 'duration_secs': 0.757279} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.052254] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1169.052585] env[69992]: DEBUG nova.compute.manager [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1169.054520] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b5f80f-79f5-4c1e-b075-d39e1800eec3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.129322] env[69992]: DEBUG oslo_vmware.api [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897454, 'name': ReconfigVM_Task, 'duration_secs': 0.185968} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.130425] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Updating instance '9df7b187-e579-41b0-9d24-be2a1ae93079' progress to 33 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1169.242987] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4bcd6e34-4405-4272-9ebc-aae19569fb07 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lock "f64108ec-c3b2-4b11-9085-2c56b0de93f5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.431s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.458297] env[69992]: DEBUG oslo_vmware.api [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ce0ae9-c10b-50eb-9528-5dfe235f5c72, 'name': SearchDatastore_Task, 'duration_secs': 0.009228} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.459657] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69ef50a6-721f-4b5f-93ff-200b62d7bf9a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.466389] env[69992]: DEBUG nova.compute.manager [req-7a04011d-e7e8-4aa2-9fcd-c79eb96a5943 req-b7e17608-b950-41ea-a14d-bd8a566c028d service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Received event network-changed-48ef557e-b0bc-4415-84c9-60b9146b4ff7 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1169.466717] env[69992]: DEBUG nova.compute.manager [req-7a04011d-e7e8-4aa2-9fcd-c79eb96a5943 req-b7e17608-b950-41ea-a14d-bd8a566c028d service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Refreshing instance network info cache due to event network-changed-48ef557e-b0bc-4415-84c9-60b9146b4ff7. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1169.469224] env[69992]: DEBUG oslo_concurrency.lockutils [req-7a04011d-e7e8-4aa2-9fcd-c79eb96a5943 req-b7e17608-b950-41ea-a14d-bd8a566c028d service nova] Acquiring lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.469224] env[69992]: DEBUG oslo_concurrency.lockutils [req-7a04011d-e7e8-4aa2-9fcd-c79eb96a5943 req-b7e17608-b950-41ea-a14d-bd8a566c028d service nova] Acquired lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1169.469224] env[69992]: DEBUG nova.network.neutron [req-7a04011d-e7e8-4aa2-9fcd-c79eb96a5943 req-b7e17608-b950-41ea-a14d-bd8a566c028d service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Refreshing network info cache for port 48ef557e-b0bc-4415-84c9-60b9146b4ff7 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1169.472343] env[69992]: DEBUG oslo_vmware.api [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Waiting for the task: (returnval){ [ 1169.472343] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52423555-68f2-af96-84b5-4e0dd6c82c1b" [ 1169.472343] env[69992]: _type = "Task" [ 1169.472343] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.480817] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "refresh_cache-88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.481081] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired lock "refresh_cache-88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1169.481327] env[69992]: DEBUG nova.network.neutron [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1169.490078] env[69992]: DEBUG oslo_vmware.api [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52423555-68f2-af96-84b5-4e0dd6c82c1b, 'name': SearchDatastore_Task, 'duration_secs': 0.009967} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.490933] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1169.491270] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 37751af7-267e-4693-aaa3-cd1bb9c3d950/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk. {{(pid=69992) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1169.491848] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-595d1cae-2223-44f0-9056-5c150a186777 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.500192] env[69992]: DEBUG oslo_vmware.api [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Waiting for the task: (returnval){ [ 1169.500192] env[69992]: value = "task-2897456" [ 1169.500192] env[69992]: _type = "Task" [ 1169.500192] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.509976] env[69992]: DEBUG oslo_vmware.api [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897456, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.576543] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1169.638556] env[69992]: DEBUG nova.virt.hardware [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1169.638964] env[69992]: DEBUG nova.virt.hardware [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1169.639299] env[69992]: DEBUG nova.virt.hardware [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1169.639432] env[69992]: DEBUG nova.virt.hardware [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1169.639658] env[69992]: DEBUG nova.virt.hardware [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1169.639813] env[69992]: DEBUG nova.virt.hardware [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1169.640118] env[69992]: DEBUG nova.virt.hardware [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 
tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1169.640322] env[69992]: DEBUG nova.virt.hardware [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1169.640535] env[69992]: DEBUG nova.virt.hardware [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1169.640740] env[69992]: DEBUG nova.virt.hardware [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1169.641132] env[69992]: DEBUG nova.virt.hardware [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1169.647526] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Reconfiguring VM instance instance-0000003d to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1169.647906] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c348630c-0003-4787-b735-7f24b41957f3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.670189] env[69992]: DEBUG oslo_vmware.api [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1169.670189] env[69992]: value = "task-2897457" [ 1169.670189] env[69992]: _type = "Task" [ 1169.670189] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.680018] env[69992]: DEBUG oslo_vmware.api [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897457, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.713538] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6bbfec3c-feb0-4058-82cc-e6e89b04ffa9 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.714781] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.989s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1169.716449] env[69992]: INFO nova.compute.claims [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1170.012568] env[69992]: DEBUG oslo_vmware.api [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897456, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478459} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.012858] env[69992]: INFO nova.virt.vmwareapi.ds_util [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 37751af7-267e-4693-aaa3-cd1bb9c3d950/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk. [ 1170.013721] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7adbaab9-7651-4af7-8542-142465c950f3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.039293] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 37751af7-267e-4693-aaa3-cd1bb9c3d950/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1170.042191] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c600fa6-c67a-4111-b504-e537fbbd8824 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.059828] env[69992]: DEBUG nova.network.neutron [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1170.067195] env[69992]: DEBUG oslo_vmware.api [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Waiting for the task: (returnval){ [ 1170.067195] env[69992]: value = "task-2897458" [ 1170.067195] env[69992]: _type = "Task" [ 1170.067195] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.079573] env[69992]: DEBUG oslo_vmware.api [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897458, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.191122] env[69992]: DEBUG oslo_vmware.api [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897457, 'name': ReconfigVM_Task, 'duration_secs': 0.23068} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.191550] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Reconfigured VM instance instance-0000003d to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1170.192758] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281b7ad5-135d-406f-a1b0-1fb2d421a603 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.239016] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 9df7b187-e579-41b0-9d24-be2a1ae93079/9df7b187-e579-41b0-9d24-be2a1ae93079.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1170.241144] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59ab2168-4ee2-4160-b0cf-0b5f231ad0d7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.263984] env[69992]: DEBUG oslo_vmware.api [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1170.263984] env[69992]: value = "task-2897459" [ 1170.263984] env[69992]: _type = "Task" [ 1170.263984] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.276222] env[69992]: DEBUG oslo_vmware.api [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897459, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.326903] env[69992]: DEBUG nova.network.neutron [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Updating instance_info_cache with network_info: [{"id": "ebc337ca-1f7f-449a-85a1-1af599dd4a19", "address": "fa:16:3e:8e:40:95", "network": {"id": "107a28a2-7647-4953-abac-1246b892511f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1382825212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ab89c6cf054418a4dd1a0e61b3a5e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebc337ca-1f", "ovs_interfaceid": "ebc337ca-1f7f-449a-85a1-1af599dd4a19", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.418912] env[69992]: DEBUG nova.network.neutron [req-7a04011d-e7e8-4aa2-9fcd-c79eb96a5943 req-b7e17608-b950-41ea-a14d-bd8a566c028d service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Updated VIF entry in instance network info cache for port 48ef557e-b0bc-4415-84c9-60b9146b4ff7. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1170.419292] env[69992]: DEBUG nova.network.neutron [req-7a04011d-e7e8-4aa2-9fcd-c79eb96a5943 req-b7e17608-b950-41ea-a14d-bd8a566c028d service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Updating instance_info_cache with network_info: [{"id": "48ef557e-b0bc-4415-84c9-60b9146b4ff7", "address": "fa:16:3e:6e:ee:46", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48ef557e-b0", "ovs_interfaceid": "48ef557e-b0bc-4415-84c9-60b9146b4ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.575626] env[69992]: DEBUG oslo_vmware.api [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897458, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.667502] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "0e8163d9-6ff5-4f1e-af33-ccb42fa46750" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.667788] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "0e8163d9-6ff5-4f1e-af33-ccb42fa46750" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1170.667999] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "0e8163d9-6ff5-4f1e-af33-ccb42fa46750-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.668334] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "0e8163d9-6ff5-4f1e-af33-ccb42fa46750-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1170.668433] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "0e8163d9-6ff5-4f1e-af33-ccb42fa46750-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1170.671432] env[69992]: INFO nova.compute.manager [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Terminating instance [ 1170.779803] env[69992]: DEBUG oslo_vmware.api [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897459, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.828499] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Releasing lock "refresh_cache-88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1170.829148] env[69992]: DEBUG nova.compute.manager [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Instance network_info: |[{"id": "ebc337ca-1f7f-449a-85a1-1af599dd4a19", "address": "fa:16:3e:8e:40:95", "network": {"id": "107a28a2-7647-4953-abac-1246b892511f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1382825212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ab89c6cf054418a4dd1a0e61b3a5e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebc337ca-1f", "ovs_interfaceid": "ebc337ca-1f7f-449a-85a1-1af599dd4a19", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1170.829286] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:40:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46785c9c-8b22-487d-a854-b3e67c5ed1d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ebc337ca-1f7f-449a-85a1-1af599dd4a19', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1170.836785] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Creating folder: Project (17ab89c6cf054418a4dd1a0e61b3a5e8). Parent ref: group-v581821. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1170.839377] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6adfed9a-304c-4d94-a2af-b0f276de87eb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.850047] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Created folder: Project (17ab89c6cf054418a4dd1a0e61b3a5e8) in parent group-v581821. [ 1170.850288] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Creating folder: Instances. Parent ref: group-v582027. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1170.853058] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6138af09-6a99-4882-8bb9-4f90e4029e3c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.871564] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Created folder: Instances in parent group-v582027. [ 1170.871564] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1170.871564] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1170.871564] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-44b9e347-d165-4274-830e-8272380794da {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.893112] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1170.893112] env[69992]: value = "task-2897462" [ 1170.893112] env[69992]: _type = "Task" [ 1170.893112] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.903072] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897462, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.922895] env[69992]: DEBUG oslo_concurrency.lockutils [req-7a04011d-e7e8-4aa2-9fcd-c79eb96a5943 req-b7e17608-b950-41ea-a14d-bd8a566c028d service nova] Releasing lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1170.969714] env[69992]: DEBUG nova.compute.manager [req-35a7f580-7928-42d5-a496-d44a4c809507 req-3618a35c-7902-4298-8889-68284195c234 service nova] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Received event network-changed-ebc337ca-1f7f-449a-85a1-1af599dd4a19 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1170.970142] env[69992]: DEBUG nova.compute.manager [req-35a7f580-7928-42d5-a496-d44a4c809507 req-3618a35c-7902-4298-8889-68284195c234 service nova] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Refreshing instance network info cache due to event network-changed-ebc337ca-1f7f-449a-85a1-1af599dd4a19. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1170.970234] env[69992]: DEBUG oslo_concurrency.lockutils [req-35a7f580-7928-42d5-a496-d44a4c809507 req-3618a35c-7902-4298-8889-68284195c234 service nova] Acquiring lock "refresh_cache-88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.970384] env[69992]: DEBUG oslo_concurrency.lockutils [req-35a7f580-7928-42d5-a496-d44a4c809507 req-3618a35c-7902-4298-8889-68284195c234 service nova] Acquired lock "refresh_cache-88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1170.970546] env[69992]: DEBUG nova.network.neutron [req-35a7f580-7928-42d5-a496-d44a4c809507 req-3618a35c-7902-4298-8889-68284195c234 service nova] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Refreshing network info cache for port ebc337ca-1f7f-449a-85a1-1af599dd4a19 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1171.075402] env[69992]: DEBUG oslo_vmware.api [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897458, 'name': ReconfigVM_Task, 'duration_secs': 0.722184} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.075751] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 37751af7-267e-4693-aaa3-cd1bb9c3d950/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1171.078643] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9088aabe-ea07-4ad2-9284-e5bb5e66af00 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.104849] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7619bfc0-c311-4e99-8f66-07d2aaaf5a54 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.121985] env[69992]: DEBUG oslo_vmware.api [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Waiting for the task: (returnval){ [ 1171.121985] env[69992]: value = "task-2897463" [ 1171.121985] env[69992]: _type = "Task" [ 1171.121985] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.131147] env[69992]: DEBUG oslo_vmware.api [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897463, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.177169] env[69992]: DEBUG nova.compute.manager [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1171.177410] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1171.178400] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c77d85a-d340-492d-98f2-08ca21ba86f9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.185990] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1171.186424] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-56a69c6b-7482-4078-9abf-9415420c9eb5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.192036] env[69992]: DEBUG oslo_vmware.api [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1171.192036] env[69992]: value = "task-2897464" [ 1171.192036] env[69992]: _type = "Task" [ 1171.192036] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.206578] env[69992]: DEBUG oslo_vmware.api [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897464, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.279011] env[69992]: DEBUG oslo_vmware.api [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897459, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.364567] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d10168b-e154-4d6b-9f88-367760251501 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.372429] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f63d69-7dcf-4d2b-9848-256382b599cb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.410504] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf393c6d-8c56-45dd-b115-a7901af66297 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.418441] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897462, 'name': CreateVM_Task, 'duration_secs': 0.391038} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.420730] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1171.421657] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.421872] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1171.422332] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1171.423737] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f377f7f-23e9-4bab-a344-1b73c43824cd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.428728] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1ccdb39-90d5-4fbc-9fd1-3fc214e2b452 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.430968] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquiring lock "c1c90aa6-922d-4315-8ead-2263a55a5d6e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.431305] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lock "c1c90aa6-922d-4315-8ead-2263a55a5d6e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.431562] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquiring lock "c1c90aa6-922d-4315-8ead-2263a55a5d6e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.431794] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 
tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lock "c1c90aa6-922d-4315-8ead-2263a55a5d6e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.431995] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lock "c1c90aa6-922d-4315-8ead-2263a55a5d6e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.434537] env[69992]: INFO nova.compute.manager [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Terminating instance [ 1171.445163] env[69992]: DEBUG nova.compute.provider_tree [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1171.450022] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1171.450022] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5254a0cd-867c-6d26-687d-de78f0728fc7" [ 1171.450022] env[69992]: _type = "Task" [ 1171.450022] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.457060] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5254a0cd-867c-6d26-687d-de78f0728fc7, 'name': SearchDatastore_Task, 'duration_secs': 0.009586} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.458101] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1171.458271] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1171.458509] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.458661] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1171.458838] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1171.459374] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4aa23465-699c-49c1-a6dc-2f8050bc4624 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.467337] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1171.467544] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1171.468225] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86c8c497-149a-47a4-a1a9-275ea5b27b1b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.473317] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1171.473317] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52ae6e8a-8843-519b-81db-4ed6f9976111" [ 1171.473317] env[69992]: _type = "Task" [ 1171.473317] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.484017] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ae6e8a-8843-519b-81db-4ed6f9976111, 'name': SearchDatastore_Task, 'duration_secs': 0.008304} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.484778] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-704783d5-7a44-4181-b65d-6d551152c34c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.489645] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1171.489645] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5257c39b-2e53-489d-6d49-bb07abb6130d" [ 1171.489645] env[69992]: _type = "Task" [ 1171.489645] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.497931] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5257c39b-2e53-489d-6d49-bb07abb6130d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.633430] env[69992]: DEBUG oslo_vmware.api [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897463, 'name': ReconfigVM_Task, 'duration_secs': 0.245569} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.633851] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1171.634220] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4bfcb023-84aa-41fa-8cc5-bb94b21c4103 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.641331] env[69992]: DEBUG oslo_vmware.api [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Waiting for the task: (returnval){ [ 1171.641331] env[69992]: value = "task-2897465" [ 1171.641331] env[69992]: _type = "Task" [ 1171.641331] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.649745] env[69992]: DEBUG oslo_vmware.api [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897465, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.687950] env[69992]: DEBUG nova.network.neutron [req-35a7f580-7928-42d5-a496-d44a4c809507 req-3618a35c-7902-4298-8889-68284195c234 service nova] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Updated VIF entry in instance network info cache for port ebc337ca-1f7f-449a-85a1-1af599dd4a19. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1171.687950] env[69992]: DEBUG nova.network.neutron [req-35a7f580-7928-42d5-a496-d44a4c809507 req-3618a35c-7902-4298-8889-68284195c234 service nova] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Updating instance_info_cache with network_info: [{"id": "ebc337ca-1f7f-449a-85a1-1af599dd4a19", "address": "fa:16:3e:8e:40:95", "network": {"id": "107a28a2-7647-4953-abac-1246b892511f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1382825212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ab89c6cf054418a4dd1a0e61b3a5e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebc337ca-1f", "ovs_interfaceid": "ebc337ca-1f7f-449a-85a1-1af599dd4a19", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.702483] env[69992]: DEBUG oslo_vmware.api [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897464, 'name': PowerOffVM_Task, 'duration_secs': 0.202655} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.703518] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1171.703844] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1171.704051] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-84a61a65-4c1c-4a4b-8fc4-1604fae2d3b9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.775080] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1171.776375] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1171.776375] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Deleting the datastore file [datastore2] 0e8163d9-6ff5-4f1e-af33-ccb42fa46750 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1171.778817] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-486f0f28-736d-40a2-8321-e50ba782c8e4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.780588] env[69992]: DEBUG oslo_vmware.api [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897459, 'name': ReconfigVM_Task, 'duration_secs': 1.179031} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.780848] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 9df7b187-e579-41b0-9d24-be2a1ae93079/9df7b187-e579-41b0-9d24-be2a1ae93079.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1171.781132] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Updating instance '9df7b187-e579-41b0-9d24-be2a1ae93079' progress to 50 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1171.789940] env[69992]: DEBUG oslo_vmware.api [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1171.789940] env[69992]: value = "task-2897467" [ 1171.789940] env[69992]: _type = "Task" [ 1171.789940] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.798219] env[69992]: DEBUG oslo_vmware.api [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897467, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.952022] env[69992]: DEBUG nova.scheduler.client.report [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1171.954032] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquiring lock "refresh_cache-c1c90aa6-922d-4315-8ead-2263a55a5d6e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.954363] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquired lock "refresh_cache-c1c90aa6-922d-4315-8ead-2263a55a5d6e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1171.954644] env[69992]: DEBUG nova.network.neutron [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] 
[instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1172.001167] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5257c39b-2e53-489d-6d49-bb07abb6130d, 'name': SearchDatastore_Task, 'duration_secs': 0.008721} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.001587] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1172.001987] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb/88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1172.002751] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0aab6544-7c1f-487d-87d5-dfe1a261fe17 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.013019] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1172.013019] env[69992]: value = "task-2897468" [ 1172.013019] env[69992]: _type = "Task" [ 1172.013019] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.020471] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897468, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.151656] env[69992]: DEBUG oslo_vmware.api [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897465, 'name': PowerOnVM_Task, 'duration_secs': 0.44574} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.151948] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1172.155148] env[69992]: DEBUG nova.compute.manager [None req-3f21f86e-39cd-4ff2-9fa4-33f135880e15 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1172.155982] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f9bbecd-add3-4f09-96b9-7b4d7b3f3499 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.191181] env[69992]: DEBUG oslo_concurrency.lockutils [req-35a7f580-7928-42d5-a496-d44a4c809507 req-3618a35c-7902-4298-8889-68284195c234 service nova] Releasing lock "refresh_cache-88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1172.288306] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-903e764d-50af-4bfa-8186-90b040ec5635 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.319156] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00734c54-ea8f-4d89-9f40-e88885ea75fa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.323063] env[69992]: DEBUG oslo_vmware.api [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897467, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172573} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.323715] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1172.323909] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1172.324102] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1172.324280] env[69992]: INFO nova.compute.manager [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1172.324526] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1172.325221] env[69992]: DEBUG nova.compute.manager [-] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1172.325332] env[69992]: DEBUG nova.network.neutron [-] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1172.346059] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Updating instance '9df7b187-e579-41b0-9d24-be2a1ae93079' progress to 67 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1172.457670] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.743s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1172.458304] env[69992]: DEBUG nova.compute.manager [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1172.463284] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.081s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1172.463517] env[69992]: DEBUG nova.objects.instance [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Lazy-loading 'resources' on Instance uuid 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1172.499455] env[69992]: DEBUG nova.network.neutron [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1172.526407] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897468, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.648363] env[69992]: DEBUG nova.network.neutron [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1172.779160] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquiring lock "a06d4b38-0e39-46ef-a588-7627661cb201" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.779433] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lock "a06d4b38-0e39-46ef-a588-7627661cb201" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1172.779648] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquiring lock "a06d4b38-0e39-46ef-a588-7627661cb201-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.779870] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 
tempest-SecurityGroupsTestJSON-1379750779-project-member] Lock "a06d4b38-0e39-46ef-a588-7627661cb201-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1172.780122] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lock "a06d4b38-0e39-46ef-a588-7627661cb201-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1172.783228] env[69992]: INFO nova.compute.manager [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Terminating instance [ 1172.906925] env[69992]: DEBUG nova.network.neutron [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Port fd0c5f07-29de-4e64-a60c-655c3da4bb9e binding to destination host cpu-1 is already ACTIVE {{(pid=69992) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1172.964472] env[69992]: DEBUG nova.compute.utils [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1172.965867] env[69992]: DEBUG nova.compute.manager [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1172.969296] env[69992]: DEBUG nova.network.neutron [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1173.024710] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897468, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.572115} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.024973] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb/88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1173.028696] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1173.028989] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3c02b41f-17ca-4d5c-be64-e8cc5e23ec8a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.034779] env[69992]: DEBUG nova.policy [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dc6792edfe6245d2ba77a14aba041ca0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '658cab8ee4194f7f98dd07de450f248b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1173.044360] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1173.044360] env[69992]: value = "task-2897469" [ 1173.044360] env[69992]: _type = "Task" [ 1173.044360] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.053304] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897469, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.152720] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Releasing lock "refresh_cache-c1c90aa6-922d-4315-8ead-2263a55a5d6e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1173.153195] env[69992]: DEBUG nova.compute.manager [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1173.153401] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1173.154469] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833f199d-c035-4691-af15-5cbe33088940 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.162597] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1173.162977] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62ab929f-cb74-4a28-8b68-0a76bfe53d95 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.173495] env[69992]: DEBUG oslo_vmware.api [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1173.173495] env[69992]: value = "task-2897470" [ 1173.173495] env[69992]: _type = "Task" [ 1173.173495] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.182934] env[69992]: DEBUG oslo_vmware.api [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897470, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.213353] env[69992]: DEBUG nova.compute.manager [req-2cc7affe-3651-491d-81db-ddd19432fead req-b686a374-9f0f-432a-bfe2-3a18aa17be89 service nova] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Received event network-vif-deleted-47e06987-ed7c-4f19-8716-20716e1056c3 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1173.213547] env[69992]: INFO nova.compute.manager [req-2cc7affe-3651-491d-81db-ddd19432fead req-b686a374-9f0f-432a-bfe2-3a18aa17be89 service nova] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Neutron deleted interface 47e06987-ed7c-4f19-8716-20716e1056c3; detaching it from the instance and deleting it from the info cache [ 1173.213715] env[69992]: DEBUG nova.network.neutron [req-2cc7affe-3651-491d-81db-ddd19432fead req-b686a374-9f0f-432a-bfe2-3a18aa17be89 service nova] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.287698] env[69992]: DEBUG nova.compute.manager [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1173.288055] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1173.289676] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46bf502-f2ee-4f20-89e2-14c84188ac95 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.297077] env[69992]: DEBUG nova.network.neutron [-] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.298527] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1173.299265] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf0ffecb-24ac-4c4a-a546-ac9fa85f2da3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.306579] env[69992]: DEBUG oslo_vmware.api [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for the task: (returnval){ [ 1173.306579] env[69992]: value = "task-2897471" [ 1173.306579] env[69992]: _type = "Task" [ 1173.306579] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.317762] env[69992]: DEBUG oslo_vmware.api [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897471, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.448404] env[69992]: DEBUG nova.compute.manager [req-9de73238-9c52-4183-a2f7-11a15172f7d3 req-ffee3a42-cddd-45a0-aa5e-2888d097a02c service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Received event network-changed-b42eb1be-6903-43f9-8796-f03e6defbfce {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1173.448634] env[69992]: DEBUG nova.compute.manager [req-9de73238-9c52-4183-a2f7-11a15172f7d3 req-ffee3a42-cddd-45a0-aa5e-2888d097a02c service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Refreshing instance network info cache due to event network-changed-b42eb1be-6903-43f9-8796-f03e6defbfce. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1173.448795] env[69992]: DEBUG oslo_concurrency.lockutils [req-9de73238-9c52-4183-a2f7-11a15172f7d3 req-ffee3a42-cddd-45a0-aa5e-2888d097a02c service nova] Acquiring lock "refresh_cache-37751af7-267e-4693-aaa3-cd1bb9c3d950" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1173.448935] env[69992]: DEBUG oslo_concurrency.lockutils [req-9de73238-9c52-4183-a2f7-11a15172f7d3 req-ffee3a42-cddd-45a0-aa5e-2888d097a02c service nova] Acquired lock "refresh_cache-37751af7-267e-4693-aaa3-cd1bb9c3d950" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1173.450013] env[69992]: DEBUG nova.network.neutron [req-9de73238-9c52-4183-a2f7-11a15172f7d3 req-ffee3a42-cddd-45a0-aa5e-2888d097a02c service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Refreshing network info cache for port b42eb1be-6903-43f9-8796-f03e6defbfce {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1173.466143] env[69992]: DEBUG nova.network.neutron [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Successfully created port: da4c3835-de23-4fe6-804e-cfd3dc1580cc {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1173.475179] env[69992]: DEBUG nova.compute.manager [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1173.556384] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897469, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.120062} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.556970] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1173.557979] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b9583c-ab81-4d4c-a5ee-71f0a916c6c8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.587653] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb/88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1173.591703] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a9686f5-d172-4b97-ac6e-3e24d2d97649 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.613242] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1173.613242] env[69992]: value = "task-2897472" [ 1173.613242] env[69992]: _type = "Task" [ 1173.613242] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.629164] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897472, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.689157] env[69992]: DEBUG oslo_vmware.api [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897470, 'name': PowerOffVM_Task, 'duration_secs': 0.32837} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.690424] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1173.690424] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1173.690424] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-640a25d5-d362-4955-b5bc-e8e14ece422f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.699032] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcbb26a5-dfd5-4473-a85d-b5e17b988d5c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.706255] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41fc660b-d01a-4229-9468-f9131da14e9e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.742322] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6c71815f-0bf3-4c31-bf53-1031c7d360d2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.744663] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b35534d-1404-470e-970f-1f624dc9d6f6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.747101] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1173.747309] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1173.747488] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Deleting the datastore file [datastore1] c1c90aa6-922d-4315-8ead-2263a55a5d6e {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1173.749548] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1496c9b9-5a60-4361-90ba-1fa0ea486ee8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.767504] env[69992]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc1338c1-a056-4fea-a8cb-1b6e79cd7dfd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.772048] env[69992]: DEBUG oslo_vmware.api [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1173.772048] env[69992]: value = "task-2897474" [ 1173.772048] env[69992]: _type = "Task" [ 1173.772048] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.774753] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e27a08-21dd-4000-962f-62ccfb962cef {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.797555] env[69992]: DEBUG nova.compute.provider_tree [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1173.799646] env[69992]: INFO nova.compute.manager [-] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Took 1.47 seconds to deallocate network for instance. [ 1173.804572] env[69992]: DEBUG oslo_vmware.api [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897474, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.827831] env[69992]: DEBUG nova.compute.manager [req-2cc7affe-3651-491d-81db-ddd19432fead req-b686a374-9f0f-432a-bfe2-3a18aa17be89 service nova] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Detach interface failed, port_id=47e06987-ed7c-4f19-8716-20716e1056c3, reason: Instance 0e8163d9-6ff5-4f1e-af33-ccb42fa46750 could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1173.833091] env[69992]: DEBUG oslo_vmware.api [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897471, 'name': PowerOffVM_Task, 'duration_secs': 0.291822} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.833361] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1173.833542] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1173.833778] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b944020e-aa2c-446f-8d38-88e691a5ee43 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.911781] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1173.911781] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1173.911781] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Deleting the datastore file [datastore2] a06d4b38-0e39-46ef-a588-7627661cb201 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1173.912102] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dbde15ab-dd2b-4a1e-9320-3595829daed9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.925053] env[69992]: DEBUG oslo_vmware.api [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for the task: (returnval){ [ 1173.925053] env[69992]: value = "task-2897476" [ 1173.925053] env[69992]: _type = "Task" [ 1173.925053] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.932029] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "9df7b187-e579-41b0-9d24-be2a1ae93079-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.932029] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "9df7b187-e579-41b0-9d24-be2a1ae93079-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.932193] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "9df7b187-e579-41b0-9d24-be2a1ae93079-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1173.942336] env[69992]: DEBUG oslo_vmware.api [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897476, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.123249] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897472, 'name': ReconfigVM_Task, 'duration_secs': 0.321173} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.123429] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Reconfigured VM instance instance-00000047 to attach disk [datastore2] 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb/88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1174.123954] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-162d5b57-71b9-4e3d-905f-0a337346b925 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.130317] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1174.130317] env[69992]: value = "task-2897477" [ 1174.130317] env[69992]: _type = "Task" [ 1174.130317] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.140554] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897477, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.141705] env[69992]: DEBUG nova.network.neutron [req-9de73238-9c52-4183-a2f7-11a15172f7d3 req-ffee3a42-cddd-45a0-aa5e-2888d097a02c service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Updated VIF entry in instance network info cache for port b42eb1be-6903-43f9-8796-f03e6defbfce. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1174.142022] env[69992]: DEBUG nova.network.neutron [req-9de73238-9c52-4183-a2f7-11a15172f7d3 req-ffee3a42-cddd-45a0-aa5e-2888d097a02c service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Updating instance_info_cache with network_info: [{"id": "b42eb1be-6903-43f9-8796-f03e6defbfce", "address": "fa:16:3e:ec:df:3f", "network": {"id": "30fc2190-9d50-41ef-815f-31d0c520b954", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1488904420-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "85e3c4a655d445658f21b46f360dcfe8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb42eb1be-69", "ovs_interfaceid": "b42eb1be-6903-43f9-8796-f03e6defbfce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1174.287177] env[69992]: DEBUG oslo_vmware.api [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897474, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150656} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.287460] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1174.287622] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1174.287809] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1174.288024] env[69992]: INFO nova.compute.manager [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1174.288273] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1174.288471] env[69992]: DEBUG nova.compute.manager [-] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1174.288562] env[69992]: DEBUG nova.network.neutron [-] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1174.300667] env[69992]: DEBUG nova.scheduler.client.report [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1174.313114] env[69992]: DEBUG nova.network.neutron [-] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1174.329385] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1174.440034] env[69992]: DEBUG oslo_vmware.api [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Task: {'id': task-2897476, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139386} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.440371] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1174.440655] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1174.440942] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1174.441059] env[69992]: INFO nova.compute.manager [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1174.441370] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1174.441613] env[69992]: DEBUG nova.compute.manager [-] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1174.441739] env[69992]: DEBUG nova.network.neutron [-] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1174.486434] env[69992]: DEBUG nova.compute.manager [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1174.512857] env[69992]: DEBUG nova.virt.hardware [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9d1d2e210d47dfb465e8906f7d775826',container_format='bare',created_at=2025-03-10T17:50:31Z,direct_url=,disk_format='vmdk',id=10b701c1-9a32-4c7e-a195-4676726c8b8e,min_disk=1,min_ram=0,name='tempest-test-snap-637188217',owner='658cab8ee4194f7f98dd07de450f248b',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-03-10T17:50:48Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1174.513183] env[69992]: DEBUG nova.virt.hardware [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1174.513356] env[69992]: DEBUG nova.virt.hardware [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1174.513942] env[69992]: DEBUG nova.virt.hardware [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1174.513942] env[69992]: DEBUG nova.virt.hardware [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1174.513942] env[69992]: DEBUG nova.virt.hardware [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1174.514125] env[69992]: DEBUG nova.virt.hardware [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1174.515015] env[69992]: DEBUG nova.virt.hardware [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1174.515015] env[69992]: DEBUG nova.virt.hardware [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Got 1 
possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1174.515015] env[69992]: DEBUG nova.virt.hardware [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1174.515015] env[69992]: DEBUG nova.virt.hardware [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1174.519617] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fae2c33-7140-4a35-81bc-6503e73990fd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.531307] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ca942a2-237b-41e2-a89f-2b871a2421c2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.640835] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897477, 'name': Rename_Task, 'duration_secs': 0.148342} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.641264] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1174.641445] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-677911ff-0623-460a-a9dc-beac261e4140 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.644125] env[69992]: DEBUG oslo_concurrency.lockutils [req-9de73238-9c52-4183-a2f7-11a15172f7d3 req-ffee3a42-cddd-45a0-aa5e-2888d097a02c service nova] Releasing lock "refresh_cache-37751af7-267e-4693-aaa3-cd1bb9c3d950" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1174.653525] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1174.653525] env[69992]: value = "task-2897478" [ 1174.653525] env[69992]: _type = "Task" [ 1174.653525] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.661436] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897478, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.811840] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.348s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.814478] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5abb01a-e8e9-493d-b6be-0cdc8b1d67c2 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 26.760s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1174.817431] env[69992]: DEBUG nova.network.neutron [-] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1174.841576] env[69992]: INFO nova.scheduler.client.report [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Deleted allocations for instance 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3 [ 1174.980928] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "refresh_cache-9df7b187-e579-41b0-9d24-be2a1ae93079" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.980928] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "refresh_cache-9df7b187-e579-41b0-9d24-be2a1ae93079" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1174.980928] env[69992]: DEBUG nova.network.neutron [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1175.066047] env[69992]: DEBUG nova.network.neutron [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Successfully updated port: da4c3835-de23-4fe6-804e-cfd3dc1580cc {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1175.167047] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897478, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.185303] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Acquiring lock "451a8af1-a4a2-4c2d-932c-58955491433b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.185303] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Lock "451a8af1-a4a2-4c2d-932c-58955491433b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.207935] env[69992]: DEBUG nova.network.neutron [-] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1175.322873] env[69992]: INFO nova.compute.manager [-] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Took 1.03 seconds to deallocate network for instance. [ 1175.357532] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d9a36473-1504-4dc6-9cbf-d71baa39ae97 tempest-ServersListShow2100Test-1337120459 tempest-ServersListShow2100Test-1337120459-project-member] Lock "4609d6ce-9d5b-408d-8cb6-1baf76d85bb3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.193s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.413826] env[69992]: DEBUG nova.compute.manager [req-fae0788d-6ce2-4292-bd49-08c49d371c1e req-b42bf0db-21df-4e5c-b11b-3d1830353074 service nova] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Received event network-vif-deleted-e4c81d0e-1575-49d0-98f5-9fd01f35158c {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1175.415015] env[69992]: DEBUG nova.compute.manager [req-fae0788d-6ce2-4292-bd49-08c49d371c1e req-b42bf0db-21df-4e5c-b11b-3d1830353074 service nova] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Received event network-vif-plugged-da4c3835-de23-4fe6-804e-cfd3dc1580cc {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1175.415015] env[69992]: DEBUG oslo_concurrency.lockutils [req-fae0788d-6ce2-4292-bd49-08c49d371c1e req-b42bf0db-21df-4e5c-b11b-3d1830353074 service nova] Acquiring lock "408de352-797c-40c2-86bc-359e01c5c04e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.415015] env[69992]: DEBUG oslo_concurrency.lockutils [req-fae0788d-6ce2-4292-bd49-08c49d371c1e req-b42bf0db-21df-4e5c-b11b-3d1830353074 service nova] Lock "408de352-797c-40c2-86bc-359e01c5c04e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.415015] env[69992]: DEBUG oslo_concurrency.lockutils [req-fae0788d-6ce2-4292-bd49-08c49d371c1e 
req-b42bf0db-21df-4e5c-b11b-3d1830353074 service nova] Lock "408de352-797c-40c2-86bc-359e01c5c04e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.415015] env[69992]: DEBUG nova.compute.manager [req-fae0788d-6ce2-4292-bd49-08c49d371c1e req-b42bf0db-21df-4e5c-b11b-3d1830353074 service nova] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] No waiting events found dispatching network-vif-plugged-da4c3835-de23-4fe6-804e-cfd3dc1580cc {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1175.415547] env[69992]: WARNING nova.compute.manager [req-fae0788d-6ce2-4292-bd49-08c49d371c1e req-b42bf0db-21df-4e5c-b11b-3d1830353074 service nova] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Received unexpected event network-vif-plugged-da4c3835-de23-4fe6-804e-cfd3dc1580cc for instance with vm_state building and task_state spawning. [ 1175.415839] env[69992]: DEBUG nova.compute.manager [req-fae0788d-6ce2-4292-bd49-08c49d371c1e req-b42bf0db-21df-4e5c-b11b-3d1830353074 service nova] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Received event network-changed-da4c3835-de23-4fe6-804e-cfd3dc1580cc {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1175.416163] env[69992]: DEBUG nova.compute.manager [req-fae0788d-6ce2-4292-bd49-08c49d371c1e req-b42bf0db-21df-4e5c-b11b-3d1830353074 service nova] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Refreshing instance network info cache due to event network-changed-da4c3835-de23-4fe6-804e-cfd3dc1580cc. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1175.416490] env[69992]: DEBUG oslo_concurrency.lockutils [req-fae0788d-6ce2-4292-bd49-08c49d371c1e req-b42bf0db-21df-4e5c-b11b-3d1830353074 service nova] Acquiring lock "refresh_cache-408de352-797c-40c2-86bc-359e01c5c04e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1175.416870] env[69992]: DEBUG oslo_concurrency.lockutils [req-fae0788d-6ce2-4292-bd49-08c49d371c1e req-b42bf0db-21df-4e5c-b11b-3d1830353074 service nova] Acquired lock "refresh_cache-408de352-797c-40c2-86bc-359e01c5c04e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1175.417193] env[69992]: DEBUG nova.network.neutron [req-fae0788d-6ce2-4292-bd49-08c49d371c1e req-b42bf0db-21df-4e5c-b11b-3d1830353074 service nova] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Refreshing network info cache for port da4c3835-de23-4fe6-804e-cfd3dc1580cc {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1175.574229] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "refresh_cache-408de352-797c-40c2-86bc-359e01c5c04e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1175.630900] env[69992]: DEBUG nova.compute.manager [req-9cafbb52-1282-49b9-9cc1-4185eca41949 req-caab42e3-291e-4914-a06d-bd43e8b447b4 service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Received event network-changed-b42eb1be-6903-43f9-8796-f03e6defbfce {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1175.630971] env[69992]: DEBUG 
nova.compute.manager [req-9cafbb52-1282-49b9-9cc1-4185eca41949 req-caab42e3-291e-4914-a06d-bd43e8b447b4 service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Refreshing instance network info cache due to event network-changed-b42eb1be-6903-43f9-8796-f03e6defbfce. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1175.631220] env[69992]: DEBUG oslo_concurrency.lockutils [req-9cafbb52-1282-49b9-9cc1-4185eca41949 req-caab42e3-291e-4914-a06d-bd43e8b447b4 service nova] Acquiring lock "refresh_cache-37751af7-267e-4693-aaa3-cd1bb9c3d950" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1175.631381] env[69992]: DEBUG oslo_concurrency.lockutils [req-9cafbb52-1282-49b9-9cc1-4185eca41949 req-caab42e3-291e-4914-a06d-bd43e8b447b4 service nova] Acquired lock "refresh_cache-37751af7-267e-4693-aaa3-cd1bb9c3d950" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1175.631524] env[69992]: DEBUG nova.network.neutron [req-9cafbb52-1282-49b9-9cc1-4185eca41949 req-caab42e3-291e-4914-a06d-bd43e8b447b4 service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Refreshing network info cache for port b42eb1be-6903-43f9-8796-f03e6defbfce {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1175.662025] env[69992]: DEBUG oslo_vmware.api [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897478, 'name': PowerOnVM_Task, 'duration_secs': 0.517627} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.664277] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1175.664607] env[69992]: INFO nova.compute.manager [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Took 7.16 seconds to spawn the instance on the hypervisor. [ 1175.664682] env[69992]: DEBUG nova.compute.manager [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1175.665595] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64fbdeeb-d202-4a63-a61e-507c921d9dfa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.689635] env[69992]: DEBUG nova.compute.manager [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1175.711704] env[69992]: INFO nova.compute.manager [-] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Took 1.27 seconds to deallocate network for instance. [ 1175.813403] env[69992]: DEBUG nova.network.neutron [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Updating instance_info_cache with network_info: [{"id": "fd0c5f07-29de-4e64-a60c-655c3da4bb9e", "address": "fa:16:3e:8b:b7:f0", "network": {"id": "0655b1a1-e1ee-4efd-889c-0f72915310f5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1445509008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51b8195c4e7418cbdaa66aa5e5aff5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd0c5f07-29", "ovs_interfaceid": "fd0c5f07-29de-4e64-a60c-655c3da4bb9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1175.831546] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.894142] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a567ddd-298c-4e96-bbc2-0c78c9476ac7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.903231] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b136006-cb95-4513-a16f-638e3cc45296 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.938126] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-530c6e7d-abc9-4ac7-aa78-70b6f8a1dcf7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.949402] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97678b43-4ed5-4182-807a-bc344fa4869e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.962889] env[69992]: DEBUG nova.compute.provider_tree [None req-c5abb01a-e8e9-493d-b6be-0cdc8b1d67c2 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Inventory has not 
changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1175.969314] env[69992]: DEBUG nova.network.neutron [req-fae0788d-6ce2-4292-bd49-08c49d371c1e req-b42bf0db-21df-4e5c-b11b-3d1830353074 service nova] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1176.053034] env[69992]: DEBUG nova.network.neutron [req-fae0788d-6ce2-4292-bd49-08c49d371c1e req-b42bf0db-21df-4e5c-b11b-3d1830353074 service nova] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1176.184174] env[69992]: INFO nova.compute.manager [None req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Took 41.43 seconds to build instance. [ 1176.220658] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.224860] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.316862] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "refresh_cache-9df7b187-e579-41b0-9d24-be2a1ae93079" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1176.367520] env[69992]: DEBUG nova.network.neutron [req-9cafbb52-1282-49b9-9cc1-4185eca41949 req-caab42e3-291e-4914-a06d-bd43e8b447b4 service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Updated VIF entry in instance network info cache for port b42eb1be-6903-43f9-8796-f03e6defbfce. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1176.367915] env[69992]: DEBUG nova.network.neutron [req-9cafbb52-1282-49b9-9cc1-4185eca41949 req-caab42e3-291e-4914-a06d-bd43e8b447b4 service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Updating instance_info_cache with network_info: [{"id": "b42eb1be-6903-43f9-8796-f03e6defbfce", "address": "fa:16:3e:ec:df:3f", "network": {"id": "30fc2190-9d50-41ef-815f-31d0c520b954", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1488904420-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "85e3c4a655d445658f21b46f360dcfe8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb42eb1be-69", "ovs_interfaceid": "b42eb1be-6903-43f9-8796-f03e6defbfce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1176.466302] env[69992]: DEBUG nova.scheduler.client.report [None req-c5abb01a-e8e9-493d-b6be-0cdc8b1d67c2 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1176.555536] env[69992]: DEBUG oslo_concurrency.lockutils [req-fae0788d-6ce2-4292-bd49-08c49d371c1e req-b42bf0db-21df-4e5c-b11b-3d1830353074 service nova] Releasing lock "refresh_cache-408de352-797c-40c2-86bc-359e01c5c04e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1176.555962] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquired lock "refresh_cache-408de352-797c-40c2-86bc-359e01c5c04e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1176.556382] env[69992]: DEBUG nova.network.neutron [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1176.684778] env[69992]: DEBUG oslo_concurrency.lockutils [None 
req-8e0aa115-983f-4155-8047-d78c2a10921c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.941s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1176.848988] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be05195b-e857-461d-9cb5-75933fd6e52c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.872187] env[69992]: DEBUG oslo_concurrency.lockutils [req-9cafbb52-1282-49b9-9cc1-4185eca41949 req-caab42e3-291e-4914-a06d-bd43e8b447b4 service nova] Releasing lock "refresh_cache-37751af7-267e-4693-aaa3-cd1bb9c3d950" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1176.874981] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8cf1b2-5a8e-40fe-8ea3-a62213d1f907 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.882384] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Updating instance '9df7b187-e579-41b0-9d24-be2a1ae93079' progress to 83 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1177.123397] env[69992]: DEBUG nova.network.neutron [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1177.324816] env[69992]: DEBUG nova.network.neutron [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Updating instance_info_cache with network_info: [{"id": "da4c3835-de23-4fe6-804e-cfd3dc1580cc", "address": "fa:16:3e:6b:3e:c3", "network": {"id": "bea180e9-720e-4be5-bb1d-8aa1243cfe3f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-67313604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "658cab8ee4194f7f98dd07de450f248b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda4c3835-de", "ovs_interfaceid": "da4c3835-de23-4fe6-804e-cfd3dc1580cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.388371] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1177.388688] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-135baf5f-e14f-49e0-a133-5a9d4fb69310 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.396733] env[69992]: DEBUG oslo_vmware.api [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1177.396733] env[69992]: value = "task-2897479" [ 1177.396733] env[69992]: _type = "Task" [ 1177.396733] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.405493] env[69992]: DEBUG oslo_vmware.api [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897479, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.437857] env[69992]: DEBUG nova.compute.manager [req-8ddc7436-84e1-4cfd-8543-b8861a8f8486 req-334018c3-4665-4af6-bdee-ec97844bc279 service nova] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Received event network-changed-ebc337ca-1f7f-449a-85a1-1af599dd4a19 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1177.437857] env[69992]: DEBUG nova.compute.manager [req-8ddc7436-84e1-4cfd-8543-b8861a8f8486 req-334018c3-4665-4af6-bdee-ec97844bc279 service nova] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Refreshing instance network info cache due to event network-changed-ebc337ca-1f7f-449a-85a1-1af599dd4a19. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1177.437857] env[69992]: DEBUG oslo_concurrency.lockutils [req-8ddc7436-84e1-4cfd-8543-b8861a8f8486 req-334018c3-4665-4af6-bdee-ec97844bc279 service nova] Acquiring lock "refresh_cache-88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.437857] env[69992]: DEBUG oslo_concurrency.lockutils [req-8ddc7436-84e1-4cfd-8543-b8861a8f8486 req-334018c3-4665-4af6-bdee-ec97844bc279 service nova] Acquired lock "refresh_cache-88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1177.438212] env[69992]: DEBUG nova.network.neutron [req-8ddc7436-84e1-4cfd-8543-b8861a8f8486 req-334018c3-4665-4af6-bdee-ec97844bc279 service nova] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Refreshing network info cache for port ebc337ca-1f7f-449a-85a1-1af599dd4a19 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1177.476167] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5abb01a-e8e9-493d-b6be-0cdc8b1d67c2 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.662s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1177.476402] env[69992]: DEBUG nova.compute.manager [None req-c5abb01a-e8e9-493d-b6be-0cdc8b1d67c2 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=69992) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 1177.479182] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.946s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1177.479403] env[69992]: DEBUG nova.objects.instance [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lazy-loading 'resources' on Instance uuid bcb5131c-b2c6-4971-8a2e-4fcd7133442d {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1177.588181] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Acquiring lock "a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1177.588416] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Lock "a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1177.721693] env[69992]: DEBUG nova.compute.manager [req-855ba4f5-fa2f-4f08-85a5-32096ad53f4c req-d9528e44-916a-4997-b7f2-3c9944763464 service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Received event network-changed-b42eb1be-6903-43f9-8796-f03e6defbfce {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1177.721897] env[69992]: DEBUG nova.compute.manager [req-855ba4f5-fa2f-4f08-85a5-32096ad53f4c req-d9528e44-916a-4997-b7f2-3c9944763464 service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Refreshing instance network info cache due to event network-changed-b42eb1be-6903-43f9-8796-f03e6defbfce. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1177.722130] env[69992]: DEBUG oslo_concurrency.lockutils [req-855ba4f5-fa2f-4f08-85a5-32096ad53f4c req-d9528e44-916a-4997-b7f2-3c9944763464 service nova] Acquiring lock "refresh_cache-37751af7-267e-4693-aaa3-cd1bb9c3d950" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.722275] env[69992]: DEBUG oslo_concurrency.lockutils [req-855ba4f5-fa2f-4f08-85a5-32096ad53f4c req-d9528e44-916a-4997-b7f2-3c9944763464 service nova] Acquired lock "refresh_cache-37751af7-267e-4693-aaa3-cd1bb9c3d950" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1177.722441] env[69992]: DEBUG nova.network.neutron [req-855ba4f5-fa2f-4f08-85a5-32096ad53f4c req-d9528e44-916a-4997-b7f2-3c9944763464 service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Refreshing network info cache for port b42eb1be-6903-43f9-8796-f03e6defbfce {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1177.828460] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Releasing lock "refresh_cache-408de352-797c-40c2-86bc-359e01c5c04e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1177.828809] env[69992]: DEBUG nova.compute.manager [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Instance network_info: |[{"id": "da4c3835-de23-4fe6-804e-cfd3dc1580cc", "address": "fa:16:3e:6b:3e:c3", "network": {"id": "bea180e9-720e-4be5-bb1d-8aa1243cfe3f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-67313604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "658cab8ee4194f7f98dd07de450f248b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda4c3835-de", "ovs_interfaceid": "da4c3835-de23-4fe6-804e-cfd3dc1580cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1177.829241] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:3e:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4406a73e-2189-46ac-9e96-4f0af80b5094', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da4c3835-de23-4fe6-804e-cfd3dc1580cc', 'vif_model': 'vmxnet3'}] {{(pid=69992) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1177.847295] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1177.847295] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1177.847525] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6b27b9bf-e4b8-4169-a27c-48371563e564 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.867467] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1177.867467] env[69992]: value = "task-2897480" [ 1177.867467] env[69992]: _type = "Task" [ 1177.867467] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.876690] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897480, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.906348] env[69992]: DEBUG oslo_vmware.api [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897479, 'name': PowerOnVM_Task, 'duration_secs': 0.413002} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.906545] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1177.906761] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b54fdb05-9437-45eb-9eaf-23f1d15120c9 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Updating instance '9df7b187-e579-41b0-9d24-be2a1ae93079' progress to 100 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1178.043736] env[69992]: INFO nova.scheduler.client.report [None req-c5abb01a-e8e9-493d-b6be-0cdc8b1d67c2 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Deleted allocation for migration a6bc0f28-8d6f-4922-895d-929bc5809dd2 [ 1178.091479] env[69992]: DEBUG nova.compute.manager [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1178.202099] env[69992]: DEBUG nova.network.neutron [req-8ddc7436-84e1-4cfd-8543-b8861a8f8486 req-334018c3-4665-4af6-bdee-ec97844bc279 service nova] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Updated VIF entry in instance network info cache for port ebc337ca-1f7f-449a-85a1-1af599dd4a19. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1178.202483] env[69992]: DEBUG nova.network.neutron [req-8ddc7436-84e1-4cfd-8543-b8861a8f8486 req-334018c3-4665-4af6-bdee-ec97844bc279 service nova] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Updating instance_info_cache with network_info: [{"id": "ebc337ca-1f7f-449a-85a1-1af599dd4a19", "address": "fa:16:3e:8e:40:95", "network": {"id": "107a28a2-7647-4953-abac-1246b892511f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1382825212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ab89c6cf054418a4dd1a0e61b3a5e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebc337ca-1f", "ovs_interfaceid": "ebc337ca-1f7f-449a-85a1-1af599dd4a19", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.290778] env[69992]: DEBUG oslo_concurrency.lockutils [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Acquiring lock "37751af7-267e-4693-aaa3-cd1bb9c3d950" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1178.291099] env[69992]: DEBUG oslo_concurrency.lockutils [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Lock "37751af7-267e-4693-aaa3-cd1bb9c3d950" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1178.291325] env[69992]: DEBUG oslo_concurrency.lockutils [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Acquiring lock "37751af7-267e-4693-aaa3-cd1bb9c3d950-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1178.291530] env[69992]: DEBUG oslo_concurrency.lockutils [None 
req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Lock "37751af7-267e-4693-aaa3-cd1bb9c3d950-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1178.291701] env[69992]: DEBUG oslo_concurrency.lockutils [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Lock "37751af7-267e-4693-aaa3-cd1bb9c3d950-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1178.296472] env[69992]: INFO nova.compute.manager [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Terminating instance [ 1178.377734] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897480, 'name': CreateVM_Task, 'duration_secs': 0.323938} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.380382] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1178.381539] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/10b701c1-9a32-4c7e-a195-4676726c8b8e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.382471] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/10b701c1-9a32-4c7e-a195-4676726c8b8e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1178.382471] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/10b701c1-9a32-4c7e-a195-4676726c8b8e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1178.382471] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84701b80-9055-41c0-a6f0-8a33472fb4a2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.387158] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1178.387158] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52cb204b-61f3-6f48-6e07-7542cb38bcbe" [ 1178.387158] env[69992]: _type = "Task" [ 1178.387158] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.397699] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52cb204b-61f3-6f48-6e07-7542cb38bcbe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.437689] env[69992]: DEBUG nova.network.neutron [req-855ba4f5-fa2f-4f08-85a5-32096ad53f4c req-d9528e44-916a-4997-b7f2-3c9944763464 service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Updated VIF entry in instance network info cache for port b42eb1be-6903-43f9-8796-f03e6defbfce. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1178.438107] env[69992]: DEBUG nova.network.neutron [req-855ba4f5-fa2f-4f08-85a5-32096ad53f4c req-d9528e44-916a-4997-b7f2-3c9944763464 service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Updating instance_info_cache with network_info: [{"id": "b42eb1be-6903-43f9-8796-f03e6defbfce", "address": "fa:16:3e:ec:df:3f", "network": {"id": "30fc2190-9d50-41ef-815f-31d0c520b954", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1488904420-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "85e3c4a655d445658f21b46f360dcfe8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb42eb1be-69", "ovs_interfaceid": "b42eb1be-6903-43f9-8796-f03e6defbfce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.465527] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc2943f-6442-4794-8ac2-20f9cd944c66 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.473552] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6b4277-d1d2-4d78-a528-0edead10bb3e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.504414] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6798e2ad-7621-4be1-a73a-391adfb9359f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.511135] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e43799-1cf4-41bd-8695-7e60f8206cb0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.523719] env[69992]: DEBUG 
nova.compute.provider_tree [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1178.554501] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5abb01a-e8e9-493d-b6be-0cdc8b1d67c2 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "a7f01cd7-f148-48fc-a71a-5461672d6039" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 33.973s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1178.612489] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1178.705395] env[69992]: DEBUG oslo_concurrency.lockutils [req-8ddc7436-84e1-4cfd-8543-b8861a8f8486 req-334018c3-4665-4af6-bdee-ec97844bc279 service nova] Releasing lock "refresh_cache-88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1178.799744] env[69992]: DEBUG nova.compute.manager [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1178.800057] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1178.800957] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-369bf2e5-519d-47f1-8fa1-9b0495504c89 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.808649] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1178.808937] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7a79d41-9c36-4992-96e1-cef91d6ff8fe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.815384] env[69992]: DEBUG oslo_vmware.api [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Waiting for the task: (returnval){ [ 1178.815384] env[69992]: value = "task-2897481" [ 1178.815384] env[69992]: _type = "Task" [ 1178.815384] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.823605] env[69992]: DEBUG oslo_vmware.api [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897481, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.899591] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/10b701c1-9a32-4c7e-a195-4676726c8b8e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1178.900228] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Processing image 10b701c1-9a32-4c7e-a195-4676726c8b8e {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1178.900228] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/10b701c1-9a32-4c7e-a195-4676726c8b8e/10b701c1-9a32-4c7e-a195-4676726c8b8e.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.900360] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/10b701c1-9a32-4c7e-a195-4676726c8b8e/10b701c1-9a32-4c7e-a195-4676726c8b8e.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1178.900631] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1178.900814] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-25a0342c-7d0f-4a58-bdd8-1d27c62672ee {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.910454] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1178.910708] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1178.911405] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1d1d792-59ea-4400-b709-eb54f2a3f861 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.921947] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1178.921947] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52bbc1d5-dacc-5ef0-f99d-8997c842c536" [ 1178.921947] env[69992]: _type = "Task" [ 1178.921947] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.932066] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Preparing fetch location {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1178.932066] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Fetch image to [datastore2] OSTACK_IMG_0b6d6505-5762-4b02-9eb4-1db30ad46441/OSTACK_IMG_0b6d6505-5762-4b02-9eb4-1db30ad46441.vmdk {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1178.932279] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Downloading stream optimized image 10b701c1-9a32-4c7e-a195-4676726c8b8e to [datastore2] OSTACK_IMG_0b6d6505-5762-4b02-9eb4-1db30ad46441/OSTACK_IMG_0b6d6505-5762-4b02-9eb4-1db30ad46441.vmdk on the data store datastore2 as vApp {{(pid=69992) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1178.932349] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Downloading image file data 10b701c1-9a32-4c7e-a195-4676726c8b8e to the ESX as VM named 'OSTACK_IMG_0b6d6505-5762-4b02-9eb4-1db30ad46441' {{(pid=69992) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1178.940817] env[69992]: DEBUG oslo_concurrency.lockutils [req-855ba4f5-fa2f-4f08-85a5-32096ad53f4c req-d9528e44-916a-4997-b7f2-3c9944763464 service nova] Releasing lock "refresh_cache-37751af7-267e-4693-aaa3-cd1bb9c3d950" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1178.941081] env[69992]: DEBUG nova.compute.manager [req-855ba4f5-fa2f-4f08-85a5-32096ad53f4c req-d9528e44-916a-4997-b7f2-3c9944763464 service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Received event network-changed-b42eb1be-6903-43f9-8796-f03e6defbfce {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1178.941291] env[69992]: DEBUG nova.compute.manager [req-855ba4f5-fa2f-4f08-85a5-32096ad53f4c req-d9528e44-916a-4997-b7f2-3c9944763464 service 
nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Refreshing instance network info cache due to event network-changed-b42eb1be-6903-43f9-8796-f03e6defbfce. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1178.941527] env[69992]: DEBUG oslo_concurrency.lockutils [req-855ba4f5-fa2f-4f08-85a5-32096ad53f4c req-d9528e44-916a-4997-b7f2-3c9944763464 service nova] Acquiring lock "refresh_cache-37751af7-267e-4693-aaa3-cd1bb9c3d950" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.941736] env[69992]: DEBUG oslo_concurrency.lockutils [req-855ba4f5-fa2f-4f08-85a5-32096ad53f4c req-d9528e44-916a-4997-b7f2-3c9944763464 service nova] Acquired lock "refresh_cache-37751af7-267e-4693-aaa3-cd1bb9c3d950" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1178.941923] env[69992]: DEBUG nova.network.neutron [req-855ba4f5-fa2f-4f08-85a5-32096ad53f4c req-d9528e44-916a-4997-b7f2-3c9944763464 service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Refreshing network info cache for port b42eb1be-6903-43f9-8796-f03e6defbfce {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1179.014091] env[69992]: DEBUG oslo_vmware.rw_handles [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1179.014091] env[69992]: value = "resgroup-9" [ 1179.014091] env[69992]: _type = "ResourcePool" [ 1179.014091] env[69992]: }. {{(pid=69992) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1179.014503] env[69992]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-fd51ba18-1bd1-43b1-87fa-7bf80acd23fc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.030797] env[69992]: DEBUG nova.scheduler.client.report [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1179.041225] env[69992]: DEBUG oslo_vmware.rw_handles [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lease: (returnval){ [ 1179.041225] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52834def-0399-e719-a24e-58ffefd70628" [ 1179.041225] env[69992]: _type = "HttpNfcLease" [ 1179.041225] env[69992]: } obtained for vApp import into resource pool (val){ [ 1179.041225] env[69992]: value = "resgroup-9" [ 1179.041225] env[69992]: _type = "ResourcePool" [ 1179.041225] env[69992]: }. 
{{(pid=69992) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1179.041225] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the lease: (returnval){ [ 1179.041225] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52834def-0399-e719-a24e-58ffefd70628" [ 1179.041225] env[69992]: _type = "HttpNfcLease" [ 1179.041225] env[69992]: } to be ready. {{(pid=69992) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1179.050400] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1179.050400] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52834def-0399-e719-a24e-58ffefd70628" [ 1179.050400] env[69992]: _type = "HttpNfcLease" [ 1179.050400] env[69992]: } is initializing. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1179.325290] env[69992]: DEBUG oslo_vmware.api [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897481, 'name': PowerOffVM_Task, 'duration_secs': 0.224471} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.325570] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1179.325744] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1179.325996] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-adab3553-dbd6-4e41-a765-ac152e5384ad {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.381133] env[69992]: DEBUG nova.objects.instance [None req-2e7d1392-d2ab-4817-a884-6d12c0a7c36d tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lazy-loading 'flavor' on Instance uuid a7f01cd7-f148-48fc-a71a-5461672d6039 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1179.399276] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1179.399276] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Deleting contents of the VM from datastore datastore2 {{(pid=69992) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1179.399276] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Deleting the datastore file [datastore2] 37751af7-267e-4693-aaa3-cd1bb9c3d950 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1179.399762] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b7f285a-43ce-430b-806c-b2fe6014206f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.407821] env[69992]: DEBUG oslo_vmware.api [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Waiting for the task: (returnval){ [ 1179.407821] env[69992]: value = "task-2897484" [ 1179.407821] env[69992]: _type = "Task" [ 1179.407821] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.415514] env[69992]: DEBUG oslo_vmware.api [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897484, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.536706] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.057s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1179.538936] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.271s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1179.539413] env[69992]: DEBUG nova.objects.instance [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lazy-loading 'resources' on Instance uuid f2ac32d7-d32b-497a-a262-ab1cd95f87d0 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1179.548885] env[69992]: DEBUG nova.objects.instance [None req-b83fef25-1438-446a-8692-8f28eab4a0d3 tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Lazy-loading 'flavor' on Instance uuid 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1179.555445] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1179.555445] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52834def-0399-e719-a24e-58ffefd70628" [ 1179.555445] env[69992]: _type = "HttpNfcLease" [ 1179.555445] env[69992]: } is initializing. 
{{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1179.570363] env[69992]: INFO nova.scheduler.client.report [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Deleted allocations for instance bcb5131c-b2c6-4971-8a2e-4fcd7133442d [ 1179.648843] env[69992]: DEBUG nova.network.neutron [req-855ba4f5-fa2f-4f08-85a5-32096ad53f4c req-d9528e44-916a-4997-b7f2-3c9944763464 service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Updated VIF entry in instance network info cache for port b42eb1be-6903-43f9-8796-f03e6defbfce. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1179.649205] env[69992]: DEBUG nova.network.neutron [req-855ba4f5-fa2f-4f08-85a5-32096ad53f4c req-d9528e44-916a-4997-b7f2-3c9944763464 service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Updating instance_info_cache with network_info: [{"id": "b42eb1be-6903-43f9-8796-f03e6defbfce", "address": "fa:16:3e:ec:df:3f", "network": {"id": "30fc2190-9d50-41ef-815f-31d0c520b954", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1488904420-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "85e3c4a655d445658f21b46f360dcfe8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb42eb1be-69", "ovs_interfaceid": "b42eb1be-6903-43f9-8796-f03e6defbfce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1179.886131] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2e7d1392-d2ab-4817-a884-6d12c0a7c36d tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "refresh_cache-a7f01cd7-f148-48fc-a71a-5461672d6039" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.886914] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2e7d1392-d2ab-4817-a884-6d12c0a7c36d tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired lock "refresh_cache-a7f01cd7-f148-48fc-a71a-5461672d6039" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1179.887097] env[69992]: DEBUG nova.network.neutron [None req-2e7d1392-d2ab-4817-a884-6d12c0a7c36d tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1179.887315] env[69992]: DEBUG nova.objects.instance [None req-2e7d1392-d2ab-4817-a884-6d12c0a7c36d tempest-ServerActionsTestOtherB-873622132 
tempest-ServerActionsTestOtherB-873622132-project-member] Lazy-loading 'info_cache' on Instance uuid a7f01cd7-f148-48fc-a71a-5461672d6039 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1179.918646] env[69992]: DEBUG oslo_vmware.api [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Task: {'id': task-2897484, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.215501} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.918909] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1179.919111] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1179.919294] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1179.919473] env[69992]: INFO nova.compute.manager [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1179.919716] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1179.919928] env[69992]: DEBUG nova.compute.manager [-] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1179.920044] env[69992]: DEBUG nova.network.neutron [-] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1180.056379] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b83fef25-1438-446a-8692-8f28eab4a0d3 tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Acquiring lock "refresh_cache-30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.057373] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b83fef25-1438-446a-8692-8f28eab4a0d3 tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Acquired lock "refresh_cache-30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1180.061383] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1180.061383] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52834def-0399-e719-a24e-58ffefd70628" [ 1180.061383] env[69992]: _type = "HttpNfcLease" [ 1180.061383] env[69992]: } is initializing. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1180.081463] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b9fae5d9-6dd4-4cb6-a3eb-666a9ae796e8 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "bcb5131c-b2c6-4971-8a2e-4fcd7133442d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1180.133441] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0e8c4f98-0fd1-411a-94c6-ceac9f823a69 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "9df7b187-e579-41b0-9d24-be2a1ae93079" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1180.134035] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0e8c4f98-0fd1-411a-94c6-ceac9f823a69 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "9df7b187-e579-41b0-9d24-be2a1ae93079" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1180.134372] env[69992]: DEBUG nova.compute.manager [None req-0e8c4f98-0fd1-411a-94c6-ceac9f823a69 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Going to confirm migration 4 {{(pid=69992) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1180.152422] env[69992]: DEBUG oslo_concurrency.lockutils 
[req-855ba4f5-fa2f-4f08-85a5-32096ad53f4c req-d9528e44-916a-4997-b7f2-3c9944763464 service nova] Releasing lock "refresh_cache-37751af7-267e-4693-aaa3-cd1bb9c3d950" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1180.301832] env[69992]: DEBUG nova.compute.manager [req-cb60f618-37a2-460d-97c2-cbf2f37e4d8c req-6cb9cfdd-ad23-4571-8a80-c88791cc678b service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Received event network-vif-deleted-b42eb1be-6903-43f9-8796-f03e6defbfce {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1180.302829] env[69992]: INFO nova.compute.manager [req-cb60f618-37a2-460d-97c2-cbf2f37e4d8c req-6cb9cfdd-ad23-4571-8a80-c88791cc678b service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Neutron deleted interface b42eb1be-6903-43f9-8796-f03e6defbfce; detaching it from the instance and deleting it from the info cache [ 1180.303168] env[69992]: DEBUG nova.network.neutron [req-cb60f618-37a2-460d-97c2-cbf2f37e4d8c req-6cb9cfdd-ad23-4571-8a80-c88791cc678b service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.390739] env[69992]: DEBUG nova.objects.base [None req-2e7d1392-d2ab-4817-a884-6d12c0a7c36d tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1180.442536] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "a49b4721-e338-4e60-b91e-137caa3c9c03" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1180.442536] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "a49b4721-e338-4e60-b91e-137caa3c9c03" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1180.442536] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "a49b4721-e338-4e60-b91e-137caa3c9c03-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1180.442536] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "a49b4721-e338-4e60-b91e-137caa3c9c03-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1180.442536] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a971e623-d252-4ad9-a7c0-fad9926571fb 
tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "a49b4721-e338-4e60-b91e-137caa3c9c03-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1180.445655] env[69992]: INFO nova.compute.manager [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Terminating instance [ 1180.537419] env[69992]: DEBUG nova.network.neutron [None req-b83fef25-1438-446a-8692-8f28eab4a0d3 tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1180.557667] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1180.557667] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52834def-0399-e719-a24e-58ffefd70628" [ 1180.557667] env[69992]: _type = "HttpNfcLease" [ 1180.557667] env[69992]: } is initializing. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1180.564222] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62c2c307-f36e-46c7-8008-91e06460c97a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.571321] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34d89e8-824c-4aa4-a8e7-bfecc6fe703c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.603296] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d31dad90-abc4-476c-8c4d-15776accf7a2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.610306] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c124d4-5886-4935-848e-72e608629e48 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.624905] env[69992]: DEBUG nova.compute.provider_tree [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1180.694448] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0e8c4f98-0fd1-411a-94c6-ceac9f823a69 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "refresh_cache-9df7b187-e579-41b0-9d24-be2a1ae93079" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.694637] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0e8c4f98-0fd1-411a-94c6-ceac9f823a69 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "refresh_cache-9df7b187-e579-41b0-9d24-be2a1ae93079" {{(pid=69992) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1180.694814] env[69992]: DEBUG nova.network.neutron [None req-0e8c4f98-0fd1-411a-94c6-ceac9f823a69 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1180.694995] env[69992]: DEBUG nova.objects.instance [None req-0e8c4f98-0fd1-411a-94c6-ceac9f823a69 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lazy-loading 'info_cache' on Instance uuid 9df7b187-e579-41b0-9d24-be2a1ae93079 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1180.778629] env[69992]: DEBUG nova.network.neutron [-] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.806189] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3a3a39e4-b454-4fac-8470-b0457d05dfe6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.815967] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a6229d-075d-49ff-9d6e-a2924030c779 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.853481] env[69992]: DEBUG nova.compute.manager [req-cb60f618-37a2-460d-97c2-cbf2f37e4d8c req-6cb9cfdd-ad23-4571-8a80-c88791cc678b service nova] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Detach interface failed, port_id=b42eb1be-6903-43f9-8796-f03e6defbfce, reason: Instance 37751af7-267e-4693-aaa3-cd1bb9c3d950 could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1180.951061] env[69992]: DEBUG nova.compute.manager [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1180.951248] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1180.952131] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5441d0ac-f044-4460-ba53-0f79c5e9f60c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.959812] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1180.962312] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6c3bcee3-bbd9-4d5c-bb8b-87f5efd7b5a2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.969960] env[69992]: DEBUG oslo_vmware.api [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1180.969960] env[69992]: value = "task-2897485" [ 1180.969960] env[69992]: _type = "Task" [ 1180.969960] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.977603] env[69992]: DEBUG oslo_vmware.api [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897485, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.058750] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1181.058750] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52834def-0399-e719-a24e-58ffefd70628" [ 1181.058750] env[69992]: _type = "HttpNfcLease" [ 1181.058750] env[69992]: } is ready. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1181.059234] env[69992]: DEBUG oslo_vmware.rw_handles [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1181.059234] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52834def-0399-e719-a24e-58ffefd70628" [ 1181.059234] env[69992]: _type = "HttpNfcLease" [ 1181.059234] env[69992]: }. 
{{(pid=69992) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1181.060400] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f045adc7-aa35-4622-a79a-f9d4680a693b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.071530] env[69992]: DEBUG oslo_vmware.rw_handles [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5299581a-b690-0c55-bd25-16ae93733ff9/disk-0.vmdk from lease info. {{(pid=69992) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1181.071827] env[69992]: DEBUG oslo_vmware.rw_handles [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5299581a-b690-0c55-bd25-16ae93733ff9/disk-0.vmdk. {{(pid=69992) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1181.147101] env[69992]: DEBUG nova.scheduler.client.report [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1181.164790] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d03f41b9-2f60-45ed-b5f7-f0b07236783a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.233454] env[69992]: DEBUG nova.network.neutron [None req-2e7d1392-d2ab-4817-a884-6d12c0a7c36d tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Updating instance_info_cache with network_info: [{"id": "b39fa912-b02a-4764-8cc8-f79e08d575c6", "address": "fa:16:3e:02:93:e2", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": 
"nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb39fa912-b0", "ovs_interfaceid": "b39fa912-b02a-4764-8cc8-f79e08d575c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1181.282650] env[69992]: INFO nova.compute.manager [-] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Took 1.36 seconds to deallocate network for instance. [ 1181.396499] env[69992]: DEBUG nova.network.neutron [None req-b83fef25-1438-446a-8692-8f28eab4a0d3 tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Updating instance_info_cache with network_info: [{"id": "abab8d85-8633-4722-85d1-b21be464919d", "address": "fa:16:3e:65:03:5b", "network": {"id": "adeff25c-7ce7-4915-aa2a-82f338cf74ca", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-834451172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef82945e1f93479ea4a19fbe1855870b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabab8d85-86", "ovs_interfaceid": "abab8d85-8633-4722-85d1-b21be464919d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1181.480501] env[69992]: DEBUG oslo_vmware.api [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897485, 'name': PowerOffVM_Task, 'duration_secs': 0.35478} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.480776] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1181.480957] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1181.481316] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6d9e6a41-c8dc-427b-b6b7-08dbdf1d62e0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.556597] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1181.556828] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1181.557018] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Deleting the datastore file [datastore1] a49b4721-e338-4e60-b91e-137caa3c9c03 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1181.557288] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a45593c-0552-4e94-bcbb-b845952928fb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.564681] env[69992]: DEBUG oslo_vmware.api [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1181.564681] env[69992]: value = "task-2897487" [ 1181.564681] env[69992]: _type = "Task" [ 1181.564681] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.574694] env[69992]: DEBUG oslo_vmware.api [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897487, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.660312] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.121s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1181.663459] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.691s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.665033] env[69992]: INFO nova.compute.claims [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1181.684124] env[69992]: INFO nova.scheduler.client.report [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Deleted allocations for instance f2ac32d7-d32b-497a-a262-ab1cd95f87d0 [ 1181.736358] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2e7d1392-d2ab-4817-a884-6d12c0a7c36d tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Releasing lock "refresh_cache-a7f01cd7-f148-48fc-a71a-5461672d6039" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1181.792027] env[69992]: DEBUG oslo_concurrency.lockutils [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1181.793474] env[69992]: DEBUG nova.objects.instance [None req-2e77957a-cc9c-4022-82c1-f06805e621da tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Lazy-loading 'flavor' on Instance uuid 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1181.899322] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b83fef25-1438-446a-8692-8f28eab4a0d3 tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Releasing lock "refresh_cache-30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1181.899596] env[69992]: DEBUG nova.compute.manager [None req-b83fef25-1438-446a-8692-8f28eab4a0d3 tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Inject network info {{(pid=69992) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 1181.899857] env[69992]: DEBUG nova.compute.manager [None 
req-b83fef25-1438-446a-8692-8f28eab4a0d3 tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] network_info to inject: |[{"id": "abab8d85-8633-4722-85d1-b21be464919d", "address": "fa:16:3e:65:03:5b", "network": {"id": "adeff25c-7ce7-4915-aa2a-82f338cf74ca", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-834451172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef82945e1f93479ea4a19fbe1855870b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabab8d85-86", "ovs_interfaceid": "abab8d85-8633-4722-85d1-b21be464919d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 1181.904955] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b83fef25-1438-446a-8692-8f28eab4a0d3 tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Reconfiguring VM instance to set the machine id {{(pid=69992) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1181.906568] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-589535ce-abec-47dc-8a72-4714eeee00fe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.930833] env[69992]: DEBUG oslo_vmware.api [None req-b83fef25-1438-446a-8692-8f28eab4a0d3 tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Waiting for the task: (returnval){ [ 1181.930833] env[69992]: value = "task-2897488" [ 1181.930833] env[69992]: _type = "Task" [ 1181.930833] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.938949] env[69992]: DEBUG oslo_vmware.api [None req-b83fef25-1438-446a-8692-8f28eab4a0d3 tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': task-2897488, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.939996] env[69992]: DEBUG nova.network.neutron [None req-0e8c4f98-0fd1-411a-94c6-ceac9f823a69 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Updating instance_info_cache with network_info: [{"id": "fd0c5f07-29de-4e64-a60c-655c3da4bb9e", "address": "fa:16:3e:8b:b7:f0", "network": {"id": "0655b1a1-e1ee-4efd-889c-0f72915310f5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1445509008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51b8195c4e7418cbdaa66aa5e5aff5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd0c5f07-29", "ovs_interfaceid": "fd0c5f07-29de-4e64-a60c-655c3da4bb9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1182.077330] env[69992]: DEBUG oslo_vmware.api [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897487, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147875} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.078698] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1182.078943] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1182.079156] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1182.079341] env[69992]: INFO nova.compute.manager [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Took 1.13 seconds to destroy the instance on the hypervisor. 
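The power-off / unregister / DeleteDatastoreFile sequence above follows the usual oslo.vmware task pattern: a *_Task SOAP method returns a task managed object, and the caller blocks in wait_for_task, which polls the task (the _poll_task "progress is N%" lines) until it reaches "success" or raises on "error". A minimal sketch of that pattern against a generic oslo_vmware session; the endpoint, credentials and helper names below are placeholders for illustration, not Nova's actual code or values from this log:

```python
from oslo_vmware import api as vmware_api


def make_session():
    # Placeholder endpoint/credentials, not taken from this log.
    return vmware_api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                       api_retry_count=10,
                                       task_poll_interval=0.5)


def delete_datastore_file_sketch(session, dc_ref, ds_path):
    """Invoke a vCenter task and block until it completes, mirroring the
    FileManager.DeleteDatastoreFile_Task / wait_for_task lines above.

    `session` is an oslo_vmware VMwareAPISession, `dc_ref` a Datacenter
    managed-object reference, and `ds_path` a datastore path such as
    '[datastore1] a49b4721-e338-4e60-b91e-137caa3c9c03'.
    """
    # The *_Task SOAP methods return a task reference, not a result.
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              session.vim.service_content.fileManager,
                              name=ds_path, datacenter=dc_ref)
    # wait_for_task polls the task state and returns its info once it
    # succeeds, raising an oslo_vmware exception if the task errors out.
    return session.wait_for_task(task)
```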
[ 1182.079657] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1182.079962] env[69992]: DEBUG nova.compute.manager [-] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1182.080015] env[69992]: DEBUG nova.network.neutron [-] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1182.118433] env[69992]: DEBUG oslo_concurrency.lockutils [None req-824cc91d-0dc6-4fa3-b1a6-af92aefd9112 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "1b4da2ab-d026-45d8-8234-79ddd84d5cbb" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1182.118781] env[69992]: DEBUG oslo_concurrency.lockutils [None req-824cc91d-0dc6-4fa3-b1a6-af92aefd9112 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "1b4da2ab-d026-45d8-8234-79ddd84d5cbb" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1182.183180] env[69992]: DEBUG oslo_vmware.rw_handles [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Completed reading data from the image iterator. {{(pid=69992) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1182.183351] env[69992]: DEBUG oslo_vmware.rw_handles [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5299581a-b690-0c55-bd25-16ae93733ff9/disk-0.vmdk. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1182.184326] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f809a19-bfdb-40c1-8c37-873dbf6b5081 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.191298] env[69992]: DEBUG oslo_vmware.rw_handles [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5299581a-b690-0c55-bd25-16ae93733ff9/disk-0.vmdk is in state: ready. 
{{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1182.191571] env[69992]: DEBUG oslo_vmware.rw_handles [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5299581a-b690-0c55-bd25-16ae93733ff9/disk-0.vmdk. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1182.194060] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-1a4706d9-5066-4ac3-b8ff-4a3fe0e3bf7e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.196291] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3eedfdbd-7c63-4bd6-891e-b473032a4afb tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "f2ac32d7-d32b-497a-a262-ab1cd95f87d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.657s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1182.299855] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2e77957a-cc9c-4022-82c1-f06805e621da tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Acquiring lock "refresh_cache-30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.299855] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2e77957a-cc9c-4022-82c1-f06805e621da tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Acquired lock "refresh_cache-30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1182.387169] env[69992]: DEBUG oslo_vmware.rw_handles [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5299581a-b690-0c55-bd25-16ae93733ff9/disk-0.vmdk. 
{{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1182.387354] env[69992]: INFO nova.virt.vmwareapi.images [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Downloaded image file data 10b701c1-9a32-4c7e-a195-4676726c8b8e [ 1182.388273] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a05746-ab44-45d7-bd6c-cc6126e9264d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.406039] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5de6c7cf-64d9-45d9-a0a3-c5791ccfcf52 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.440589] env[69992]: DEBUG oslo_vmware.api [None req-b83fef25-1438-446a-8692-8f28eab4a0d3 tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': task-2897488, 'name': ReconfigVM_Task, 'duration_secs': 0.227918} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.440945] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b83fef25-1438-446a-8692-8f28eab4a0d3 tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Reconfigured VM instance to set the machine id {{(pid=69992) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1182.443560] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0e8c4f98-0fd1-411a-94c6-ceac9f823a69 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "refresh_cache-9df7b187-e579-41b0-9d24-be2a1ae93079" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1182.443785] env[69992]: DEBUG nova.objects.instance [None req-0e8c4f98-0fd1-411a-94c6-ceac9f823a69 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lazy-loading 'migration_context' on Instance uuid 9df7b187-e579-41b0-9d24-be2a1ae93079 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1182.498026] env[69992]: INFO nova.virt.vmwareapi.images [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] The imported VM was unregistered [ 1182.502029] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Caching image {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1182.502029] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Creating directory with path [datastore2] devstack-image-cache_base/10b701c1-9a32-4c7e-a195-4676726c8b8e {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1182.502029] env[69992]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-f1b8c522-b8df-411e-b541-c278c9f4d35d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.514360] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Created directory with path [datastore2] devstack-image-cache_base/10b701c1-9a32-4c7e-a195-4676726c8b8e {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1182.514360] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_0b6d6505-5762-4b02-9eb4-1db30ad46441/OSTACK_IMG_0b6d6505-5762-4b02-9eb4-1db30ad46441.vmdk to [datastore2] devstack-image-cache_base/10b701c1-9a32-4c7e-a195-4676726c8b8e/10b701c1-9a32-4c7e-a195-4676726c8b8e.vmdk. {{(pid=69992) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1182.514589] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-137ae98f-6fad-4922-8171-ab0c877eaf92 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.522978] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1182.522978] env[69992]: value = "task-2897490" [ 1182.522978] env[69992]: _type = "Task" [ 1182.522978] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.533254] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897490, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.622630] env[69992]: DEBUG nova.compute.utils [None req-824cc91d-0dc6-4fa3-b1a6-af92aefd9112 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1182.640323] env[69992]: DEBUG nova.network.neutron [None req-2e77957a-cc9c-4022-82c1-f06805e621da tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1182.745581] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e7d1392-d2ab-4817-a884-6d12c0a7c36d tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1182.745907] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ab48665-99a6-4fa5-afb5-5a19142e53c7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.754280] env[69992]: DEBUG oslo_vmware.api [None req-2e7d1392-d2ab-4817-a884-6d12c0a7c36d tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1182.754280] env[69992]: value = "task-2897491" [ 1182.754280] env[69992]: _type = "Task" [ 1182.754280] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.759615] env[69992]: DEBUG nova.compute.manager [req-adac8cdc-dd21-479e-ac33-4e0ddb1fffc7 req-d48eb242-582a-483b-97a0-4d13dd068984 service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Received event network-changed-abab8d85-8633-4722-85d1-b21be464919d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1182.759804] env[69992]: DEBUG nova.compute.manager [req-adac8cdc-dd21-479e-ac33-4e0ddb1fffc7 req-d48eb242-582a-483b-97a0-4d13dd068984 service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Refreshing instance network info cache due to event network-changed-abab8d85-8633-4722-85d1-b21be464919d. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1182.760029] env[69992]: DEBUG oslo_concurrency.lockutils [req-adac8cdc-dd21-479e-ac33-4e0ddb1fffc7 req-d48eb242-582a-483b-97a0-4d13dd068984 service nova] Acquiring lock "refresh_cache-30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.765928] env[69992]: DEBUG oslo_vmware.api [None req-2e7d1392-d2ab-4817-a884-6d12c0a7c36d tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897491, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.908031] env[69992]: DEBUG nova.network.neutron [-] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1182.948826] env[69992]: DEBUG nova.objects.base [None req-0e8c4f98-0fd1-411a-94c6-ceac9f823a69 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Object Instance<9df7b187-e579-41b0-9d24-be2a1ae93079> lazy-loaded attributes: info_cache,migration_context {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1182.950076] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f34a59-f7e0-4cf3-aa72-e4359a4664d6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.973581] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ffd4b83-fa45-4ffe-a1df-6d735f58ca32 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.979821] env[69992]: DEBUG oslo_vmware.api [None req-0e8c4f98-0fd1-411a-94c6-ceac9f823a69 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1182.979821] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d9e416-fd77-3b47-504f-4abd1b616608" [ 1182.979821] env[69992]: _type = "Task" [ 1182.979821] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.992318] env[69992]: DEBUG oslo_vmware.api [None req-0e8c4f98-0fd1-411a-94c6-ceac9f823a69 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d9e416-fd77-3b47-504f-4abd1b616608, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.035089] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897490, 'name': MoveVirtualDisk_Task} progress is 12%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.125616] env[69992]: DEBUG oslo_concurrency.lockutils [None req-824cc91d-0dc6-4fa3-b1a6-af92aefd9112 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "1b4da2ab-d026-45d8-8234-79ddd84d5cbb" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1183.140823] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d45d73-a43d-4f0d-b7ff-3dd009e81938 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.153009] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64d25bd-c3da-4172-a646-857393a2aceb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.188738] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02e7fc49-263f-48ca-8a9d-68a2a7737c74 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.202002] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-350df7cf-9dca-4d6b-aafa-365dfcd88540 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.219620] env[69992]: DEBUG nova.compute.provider_tree [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1183.271592] env[69992]: DEBUG oslo_vmware.api [None req-2e7d1392-d2ab-4817-a884-6d12c0a7c36d tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897491, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.368279] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bf3c93f7-7151-47c1-bb75-0d107558f0dd tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "b2e45269-62cc-4266-916e-89cc95ffe981" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1183.368474] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bf3c93f7-7151-47c1-bb75-0d107558f0dd tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "b2e45269-62cc-4266-916e-89cc95ffe981" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.410177] env[69992]: INFO nova.compute.manager [-] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Took 1.33 seconds to deallocate network for instance. 
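The recurring 'Acquiring lock "..." by "..."' / 'acquired ... :: waited N.NNNs' / '"released" ... :: held N.NNNs' triplets (for "compute_resources", per-instance UUID locks, and similar) are emitted by the wrapper that oslo.concurrency's lockutils.synchronized decorator installs around the decorated function, which is why their trailing location tag reads "inner ... lockutils.py"; for nested helpers such as do_terminate_instance the holder's qualified name normally includes a `<locals>` component, which this capture appears to have flattened to "..". The plainer 'Acquiring lock "refresh_cache-..."' / 'Acquired lock' / 'Releasing lock' lines (tagged "lock ... lockutils.py") come from using the lockutils.lock context manager directly. A minimal sketch of both usage patterns, with illustrative function names rather than Nova's code:

```python
from oslo_concurrency import lockutils


# Decorator form: the wrapper it installs ("inner" in the location tag)
# logs the 'Acquiring ... by ...' / 'acquired ... :: waited' /
# '"released" ... :: held' triplets seen above.
@lockutils.synchronized('compute_resources')
def update_usage_sketch():
    pass  # runs with the named internal lock held


def terminate_instance_sketch(instance_uuid):
    # Nova-style per-instance serialization: a nested helper decorated with
    # a lock name computed at call time (here simply the instance UUID).
    @lockutils.synchronized(instance_uuid)
    def do_terminate_instance():
        pass
    do_terminate_instance()


def refresh_cache_sketch(instance_uuid, refresh_fn):
    # Context-manager form: produces the plain Acquiring/Acquired/Releasing
    # lines for locks such as "refresh_cache-<instance uuid>".
    with lockutils.lock('refresh_cache-' + instance_uuid):
        refresh_fn()
```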
[ 1183.417049] env[69992]: DEBUG nova.network.neutron [None req-2e77957a-cc9c-4022-82c1-f06805e621da tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Updating instance_info_cache with network_info: [{"id": "abab8d85-8633-4722-85d1-b21be464919d", "address": "fa:16:3e:65:03:5b", "network": {"id": "adeff25c-7ce7-4915-aa2a-82f338cf74ca", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-834451172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef82945e1f93479ea4a19fbe1855870b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabab8d85-86", "ovs_interfaceid": "abab8d85-8633-4722-85d1-b21be464919d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1183.493514] env[69992]: DEBUG oslo_vmware.api [None req-0e8c4f98-0fd1-411a-94c6-ceac9f823a69 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d9e416-fd77-3b47-504f-4abd1b616608, 'name': SearchDatastore_Task, 'duration_secs': 0.016972} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.493823] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0e8c4f98-0fd1-411a-94c6-ceac9f823a69 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1183.536475] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897490, 'name': MoveVirtualDisk_Task} progress is 32%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.723045] env[69992]: DEBUG nova.scheduler.client.report [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1183.774972] env[69992]: DEBUG oslo_vmware.api [None req-2e7d1392-d2ab-4817-a884-6d12c0a7c36d tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897491, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.875098] env[69992]: DEBUG nova.compute.manager [None req-bf3c93f7-7151-47c1-bb75-0d107558f0dd tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: b2e45269-62cc-4266-916e-89cc95ffe981] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1183.921432] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1183.922071] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2e77957a-cc9c-4022-82c1-f06805e621da tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Releasing lock "refresh_cache-30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1183.922399] env[69992]: DEBUG nova.compute.manager [None req-2e77957a-cc9c-4022-82c1-f06805e621da tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Inject network info {{(pid=69992) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 1183.922776] env[69992]: DEBUG nova.compute.manager [None req-2e77957a-cc9c-4022-82c1-f06805e621da tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] network_info to inject: |[{"id": "abab8d85-8633-4722-85d1-b21be464919d", "address": "fa:16:3e:65:03:5b", "network": {"id": "adeff25c-7ce7-4915-aa2a-82f338cf74ca", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-834451172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, 
"meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef82945e1f93479ea4a19fbe1855870b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabab8d85-86", "ovs_interfaceid": "abab8d85-8633-4722-85d1-b21be464919d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 1183.932469] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2e77957a-cc9c-4022-82c1-f06805e621da tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Reconfiguring VM instance to set the machine id {{(pid=69992) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1183.933714] env[69992]: DEBUG oslo_concurrency.lockutils [req-adac8cdc-dd21-479e-ac33-4e0ddb1fffc7 req-d48eb242-582a-483b-97a0-4d13dd068984 service nova] Acquired lock "refresh_cache-30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1183.933714] env[69992]: DEBUG nova.network.neutron [req-adac8cdc-dd21-479e-ac33-4e0ddb1fffc7 req-d48eb242-582a-483b-97a0-4d13dd068984 service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Refreshing network info cache for port abab8d85-8633-4722-85d1-b21be464919d {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1183.935489] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39384252-8f76-4c92-9f74-88c8459c64d4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.962347] env[69992]: DEBUG oslo_vmware.api [None req-2e77957a-cc9c-4022-82c1-f06805e621da tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Waiting for the task: (returnval){ [ 1183.962347] env[69992]: value = "task-2897492" [ 1183.962347] env[69992]: _type = "Task" [ 1183.962347] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.976948] env[69992]: DEBUG oslo_vmware.api [None req-2e77957a-cc9c-4022-82c1-f06805e621da tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': task-2897492, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.037919] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897490, 'name': MoveVirtualDisk_Task} progress is 54%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.198031] env[69992]: DEBUG oslo_concurrency.lockutils [None req-824cc91d-0dc6-4fa3-b1a6-af92aefd9112 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "1b4da2ab-d026-45d8-8234-79ddd84d5cbb" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1184.198338] env[69992]: DEBUG oslo_concurrency.lockutils [None req-824cc91d-0dc6-4fa3-b1a6-af92aefd9112 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "1b4da2ab-d026-45d8-8234-79ddd84d5cbb" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1184.198580] env[69992]: INFO nova.compute.manager [None req-824cc91d-0dc6-4fa3-b1a6-af92aefd9112 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Attaching volume 78d5f802-a2b3-4c3d-9484-3ea2397e9ab5 to /dev/sdb [ 1184.228730] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.565s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1184.229293] env[69992]: DEBUG nova.compute.manager [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1184.232600] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.234s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1184.234094] env[69992]: INFO nova.compute.claims [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1184.241603] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c4465fc-b3cd-439c-99b7-e1c14e410e26 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.252682] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9dc6785-2536-4e21-9195-ffcc907e796d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.275058] env[69992]: DEBUG nova.virt.block_device [None req-824cc91d-0dc6-4fa3-b1a6-af92aefd9112 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Updating existing volume attachment record: b4062642-4e6a-4894-b63e-51e959391273 {{(pid=69992) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1184.285394] env[69992]: DEBUG oslo_vmware.api [None req-2e7d1392-d2ab-4817-a884-6d12c0a7c36d tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897491, 'name': PowerOnVM_Task, 'duration_secs': 1.031536} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.285945] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e7d1392-d2ab-4817-a884-6d12c0a7c36d tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1184.286265] env[69992]: DEBUG nova.compute.manager [None req-2e7d1392-d2ab-4817-a884-6d12c0a7c36d tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1184.287986] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5d8882-6ee3-4109-bc3d-66a9ff3c035f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.329334] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Acquiring lock "30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1184.329803] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Lock "30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1184.330137] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Acquiring lock "30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1184.330278] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Lock "30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1184.330458] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Lock "30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1184.332713] env[69992]: INFO nova.compute.manager [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d 
tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Terminating instance [ 1184.381448] env[69992]: DEBUG nova.compute.manager [None req-bf3c93f7-7151-47c1-bb75-0d107558f0dd tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: b2e45269-62cc-4266-916e-89cc95ffe981] Instance disappeared before build. {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2446}} [ 1184.473760] env[69992]: DEBUG oslo_vmware.api [None req-2e77957a-cc9c-4022-82c1-f06805e621da tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': task-2897492, 'name': ReconfigVM_Task, 'duration_secs': 0.192132} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.474212] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2e77957a-cc9c-4022-82c1-f06805e621da tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Reconfigured VM instance to set the machine id {{(pid=69992) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1184.535983] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897490, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.749396] env[69992]: DEBUG nova.compute.utils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1184.751378] env[69992]: DEBUG nova.compute.manager [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1184.751614] env[69992]: DEBUG nova.network.neutron [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1184.785745] env[69992]: DEBUG nova.network.neutron [req-adac8cdc-dd21-479e-ac33-4e0ddb1fffc7 req-d48eb242-582a-483b-97a0-4d13dd068984 service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Updated VIF entry in instance network info cache for port abab8d85-8633-4722-85d1-b21be464919d. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1184.785745] env[69992]: DEBUG nova.network.neutron [req-adac8cdc-dd21-479e-ac33-4e0ddb1fffc7 req-d48eb242-582a-483b-97a0-4d13dd068984 service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Updating instance_info_cache with network_info: [{"id": "abab8d85-8633-4722-85d1-b21be464919d", "address": "fa:16:3e:65:03:5b", "network": {"id": "adeff25c-7ce7-4915-aa2a-82f338cf74ca", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-834451172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef82945e1f93479ea4a19fbe1855870b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabab8d85-86", "ovs_interfaceid": "abab8d85-8633-4722-85d1-b21be464919d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.808100] env[69992]: DEBUG nova.compute.manager [req-f50d4e8e-5e4c-4494-bb16-25e09633a3fc req-7f2cc51a-55e3-4411-a23a-52dc3c64a07d service nova] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Received event network-vif-deleted-11df5954-2f09-4c8f-bab8-a5b6740bd994 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1184.808786] env[69992]: DEBUG nova.compute.manager [req-f50d4e8e-5e4c-4494-bb16-25e09633a3fc req-7f2cc51a-55e3-4411-a23a-52dc3c64a07d service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Received event network-changed-abab8d85-8633-4722-85d1-b21be464919d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1184.808786] env[69992]: DEBUG nova.compute.manager [req-f50d4e8e-5e4c-4494-bb16-25e09633a3fc req-7f2cc51a-55e3-4411-a23a-52dc3c64a07d service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Refreshing instance network info cache due to event network-changed-abab8d85-8633-4722-85d1-b21be464919d. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1184.808786] env[69992]: DEBUG oslo_concurrency.lockutils [req-f50d4e8e-5e4c-4494-bb16-25e09633a3fc req-7f2cc51a-55e3-4411-a23a-52dc3c64a07d service nova] Acquiring lock "refresh_cache-30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.819841] env[69992]: DEBUG nova.policy [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94516a830322454c90c08043118e547a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6090a2d03daf46e9b687d24fde64fb72', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1184.839208] env[69992]: DEBUG nova.compute.manager [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1184.839208] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1184.839208] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df8bb752-5ad7-4775-b96d-396b7e66a531 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.847819] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1184.848185] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-da353041-5af2-4e38-9558-539279d42d28 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.854853] env[69992]: DEBUG oslo_vmware.api [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Waiting for the task: (returnval){ [ 1184.854853] env[69992]: value = "task-2897496" [ 1184.854853] env[69992]: _type = "Task" [ 1184.854853] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.864080] env[69992]: DEBUG oslo_vmware.api [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': task-2897496, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.894385] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bf3c93f7-7151-47c1-bb75-0d107558f0dd tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "b2e45269-62cc-4266-916e-89cc95ffe981" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 1.526s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1185.038887] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897490, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.167505] env[69992]: DEBUG nova.network.neutron [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Successfully created port: 7feb60bd-3eed-4a64-b356-4d949eb60a7e {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1185.255620] env[69992]: DEBUG nova.compute.manager [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1185.288849] env[69992]: DEBUG oslo_concurrency.lockutils [req-adac8cdc-dd21-479e-ac33-4e0ddb1fffc7 req-d48eb242-582a-483b-97a0-4d13dd068984 service nova] Releasing lock "refresh_cache-30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1185.289199] env[69992]: DEBUG oslo_concurrency.lockutils [req-f50d4e8e-5e4c-4494-bb16-25e09633a3fc req-7f2cc51a-55e3-4411-a23a-52dc3c64a07d service nova] Acquired lock "refresh_cache-30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1185.289421] env[69992]: DEBUG nova.network.neutron [req-f50d4e8e-5e4c-4494-bb16-25e09633a3fc req-7f2cc51a-55e3-4411-a23a-52dc3c64a07d service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Refreshing network info cache for port abab8d85-8633-4722-85d1-b21be464919d {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1185.311030] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "c4bd5585-d917-4d92-9ce8-fa1950944f25" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.311030] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "c4bd5585-d917-4d92-9ce8-fa1950944f25" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1185.368811] env[69992]: DEBUG oslo_vmware.api [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': task-2897496, 'name': PowerOffVM_Task, 'duration_secs': 0.449473} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.369314] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1185.369832] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1185.373096] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5537af49-7ab2-4ae5-8b0a-1054db33bbfd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.462447] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1185.466038] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1185.466038] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Deleting the datastore file [datastore2] 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1185.466038] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b37d0328-5298-42be-9b5a-8500883737b4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.474705] env[69992]: DEBUG oslo_vmware.api [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Waiting for the task: (returnval){ [ 1185.474705] env[69992]: value = "task-2897498" [ 1185.474705] env[69992]: _type = "Task" [ 1185.474705] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.484526] env[69992]: DEBUG oslo_vmware.api [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': task-2897498, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.537290] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897490, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.671375} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.539969] env[69992]: INFO nova.virt.vmwareapi.ds_util [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_0b6d6505-5762-4b02-9eb4-1db30ad46441/OSTACK_IMG_0b6d6505-5762-4b02-9eb4-1db30ad46441.vmdk to [datastore2] devstack-image-cache_base/10b701c1-9a32-4c7e-a195-4676726c8b8e/10b701c1-9a32-4c7e-a195-4676726c8b8e.vmdk. [ 1185.540189] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Cleaning up location [datastore2] OSTACK_IMG_0b6d6505-5762-4b02-9eb4-1db30ad46441 {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1185.540401] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_0b6d6505-5762-4b02-9eb4-1db30ad46441 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1185.540855] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9fc6ac49-e3c4-46cf-8e8b-c348bdb67935 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.547267] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1185.547267] env[69992]: value = "task-2897499" [ 1185.547267] env[69992]: _type = "Task" [ 1185.547267] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.557158] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897499, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.683197] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "a7f01cd7-f148-48fc-a71a-5461672d6039" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.683460] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "a7f01cd7-f148-48fc-a71a-5461672d6039" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1185.683775] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "a7f01cd7-f148-48fc-a71a-5461672d6039-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.683851] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "a7f01cd7-f148-48fc-a71a-5461672d6039-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1185.684159] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "a7f01cd7-f148-48fc-a71a-5461672d6039-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1185.688913] env[69992]: INFO nova.compute.manager [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Terminating instance [ 1185.725365] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e565fb6-d38e-4d18-9a6f-5900ead16959 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.732603] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-134c809f-cd37-420f-ab94-b67d68c1f267 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.770208] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8eb99bf-7c50-4f6a-86ed-eec729dc3a8b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.777998] env[69992]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe6b23af-9feb-4397-979b-9323f20fb5b8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.791866] env[69992]: DEBUG nova.compute.provider_tree [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1185.816999] env[69992]: DEBUG nova.compute.manager [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1185.984890] env[69992]: DEBUG oslo_vmware.api [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Task: {'id': task-2897498, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.428433} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.985247] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1185.985502] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1185.985630] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1185.985814] env[69992]: INFO nova.compute.manager [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1185.986083] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1185.986666] env[69992]: DEBUG nova.compute.manager [-] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1185.986768] env[69992]: DEBUG nova.network.neutron [-] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1186.001954] env[69992]: DEBUG nova.network.neutron [req-f50d4e8e-5e4c-4494-bb16-25e09633a3fc req-7f2cc51a-55e3-4411-a23a-52dc3c64a07d service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Updated VIF entry in instance network info cache for port abab8d85-8633-4722-85d1-b21be464919d. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1186.002386] env[69992]: DEBUG nova.network.neutron [req-f50d4e8e-5e4c-4494-bb16-25e09633a3fc req-7f2cc51a-55e3-4411-a23a-52dc3c64a07d service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Updating instance_info_cache with network_info: [{"id": "abab8d85-8633-4722-85d1-b21be464919d", "address": "fa:16:3e:65:03:5b", "network": {"id": "adeff25c-7ce7-4915-aa2a-82f338cf74ca", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-834451172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef82945e1f93479ea4a19fbe1855870b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabab8d85-86", "ovs_interfaceid": "abab8d85-8633-4722-85d1-b21be464919d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.057421] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897499, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159089} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.057674] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1186.057848] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/10b701c1-9a32-4c7e-a195-4676726c8b8e/10b701c1-9a32-4c7e-a195-4676726c8b8e.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.058127] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/10b701c1-9a32-4c7e-a195-4676726c8b8e/10b701c1-9a32-4c7e-a195-4676726c8b8e.vmdk to [datastore2] 408de352-797c-40c2-86bc-359e01c5c04e/408de352-797c-40c2-86bc-359e01c5c04e.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1186.058394] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-03b667e0-ab79-4751-ae13-5d6e05e171d2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.065013] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1186.065013] env[69992]: value = "task-2897500" [ 1186.065013] env[69992]: _type = "Task" [ 1186.065013] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.072995] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897500, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.197894] env[69992]: DEBUG nova.compute.manager [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1186.198153] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1186.199075] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c089fb60-4ff3-45c0-84a8-1989e3a898fa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.207486] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1186.207734] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3417d9c1-9f5e-4f6e-9f25-11751c468a09 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.214754] env[69992]: DEBUG oslo_vmware.api [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1186.214754] env[69992]: value = "task-2897501" [ 1186.214754] env[69992]: _type = "Task" [ 1186.214754] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.224220] env[69992]: DEBUG oslo_vmware.api [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897501, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.270799] env[69992]: DEBUG nova.compute.manager [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1186.301658] env[69992]: DEBUG nova.virt.hardware [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1186.301925] env[69992]: DEBUG nova.virt.hardware [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1186.302122] env[69992]: DEBUG nova.virt.hardware [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1186.302361] env[69992]: DEBUG nova.virt.hardware [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1186.302533] env[69992]: DEBUG nova.virt.hardware [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1186.302697] env[69992]: DEBUG nova.virt.hardware [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1186.302915] env[69992]: DEBUG nova.virt.hardware [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1186.303107] env[69992]: DEBUG nova.virt.hardware [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1186.303749] env[69992]: DEBUG nova.virt.hardware [None 
req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1186.303749] env[69992]: DEBUG nova.virt.hardware [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1186.303749] env[69992]: DEBUG nova.virt.hardware [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1186.304520] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab7abaae-3dc5-4ee7-8ddc-195a6d19bd60 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.313236] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df2a132a-ec8a-437e-9dfd-730510a37e80 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.331483] env[69992]: ERROR nova.scheduler.client.report [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [req-6702ffe4-ea5a-46a9-852f-a3c5085e0afb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6702ffe4-ea5a-46a9-852f-a3c5085e0afb"}]} [ 1186.349488] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.350825] env[69992]: DEBUG nova.scheduler.client.report [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Refreshing inventories for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1186.365580] env[69992]: DEBUG nova.scheduler.client.report [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Updating ProviderTree inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1186.365859] env[69992]: DEBUG nova.compute.provider_tree [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1186.378123] env[69992]: DEBUG nova.scheduler.client.report [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Refreshing aggregate associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, aggregates: None {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1186.402909] env[69992]: DEBUG nova.scheduler.client.report [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Refreshing trait associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1186.506109] env[69992]: DEBUG 
oslo_concurrency.lockutils [req-f50d4e8e-5e4c-4494-bb16-25e09633a3fc req-7f2cc51a-55e3-4411-a23a-52dc3c64a07d service nova] Releasing lock "refresh_cache-30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.584853] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897500, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.725032] env[69992]: DEBUG oslo_vmware.api [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897501, 'name': PowerOffVM_Task, 'duration_secs': 0.398117} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.725355] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1186.725355] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1186.725600] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a7c83cdb-bb59-44fb-8f38-bdd66fb9dd8a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.737910] env[69992]: DEBUG nova.compute.manager [req-21f111fe-ea26-4811-9809-0c944d3dcaac req-9fa5c387-d55c-4c72-9cda-190e9c06d24a service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Received event network-vif-deleted-abab8d85-8633-4722-85d1-b21be464919d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1186.741364] env[69992]: INFO nova.compute.manager [req-21f111fe-ea26-4811-9809-0c944d3dcaac req-9fa5c387-d55c-4c72-9cda-190e9c06d24a service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Neutron deleted interface abab8d85-8633-4722-85d1-b21be464919d; detaching it from the instance and deleting it from the info cache [ 1186.741744] env[69992]: DEBUG nova.network.neutron [req-21f111fe-ea26-4811-9809-0c944d3dcaac req-9fa5c387-d55c-4c72-9cda-190e9c06d24a service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.795376] env[69992]: DEBUG nova.network.neutron [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Successfully updated port: 7feb60bd-3eed-4a64-b356-4d949eb60a7e {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1186.826805] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None 
req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1186.827206] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1186.828041] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Deleting the datastore file [datastore2] a7f01cd7-f148-48fc-a71a-5461672d6039 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1186.828041] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d68cc4d8-8f1f-4139-9667-3c32e243c296 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.838929] env[69992]: DEBUG oslo_vmware.api [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1186.838929] env[69992]: value = "task-2897504" [ 1186.838929] env[69992]: _type = "Task" [ 1186.838929] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.848458] env[69992]: DEBUG oslo_vmware.api [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897504, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.923336] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8988660b-acff-44d3-b254-a9c130d7ffd3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.931719] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fda2c894-f767-474d-8c96-b23dfb6f517d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.964475] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13a1cf3-d25b-4306-9b65-a39a40ffc25f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.968062] env[69992]: DEBUG nova.compute.manager [req-5e93db2c-058c-4915-9025-ba3770fa0491 req-52619950-6bd0-462b-a8a8-67d578658a6b service nova] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Received event network-vif-plugged-7feb60bd-3eed-4a64-b356-4d949eb60a7e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1186.968285] env[69992]: DEBUG oslo_concurrency.lockutils [req-5e93db2c-058c-4915-9025-ba3770fa0491 req-52619950-6bd0-462b-a8a8-67d578658a6b service nova] Acquiring lock "a35dd590-b5ff-4878-8aa5-8797814d8779-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.968498] env[69992]: DEBUG oslo_concurrency.lockutils [req-5e93db2c-058c-4915-9025-ba3770fa0491 req-52619950-6bd0-462b-a8a8-67d578658a6b service nova] Lock "a35dd590-b5ff-4878-8aa5-8797814d8779-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.968668] env[69992]: DEBUG oslo_concurrency.lockutils [req-5e93db2c-058c-4915-9025-ba3770fa0491 req-52619950-6bd0-462b-a8a8-67d578658a6b service nova] Lock "a35dd590-b5ff-4878-8aa5-8797814d8779-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.968836] env[69992]: DEBUG nova.compute.manager [req-5e93db2c-058c-4915-9025-ba3770fa0491 req-52619950-6bd0-462b-a8a8-67d578658a6b service nova] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] No waiting events found dispatching network-vif-plugged-7feb60bd-3eed-4a64-b356-4d949eb60a7e {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1186.969012] env[69992]: WARNING nova.compute.manager [req-5e93db2c-058c-4915-9025-ba3770fa0491 req-52619950-6bd0-462b-a8a8-67d578658a6b service nova] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Received unexpected event network-vif-plugged-7feb60bd-3eed-4a64-b356-4d949eb60a7e for instance with vm_state building and task_state spawning. 
[ 1186.969181] env[69992]: DEBUG nova.compute.manager [req-5e93db2c-058c-4915-9025-ba3770fa0491 req-52619950-6bd0-462b-a8a8-67d578658a6b service nova] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Received event network-changed-7feb60bd-3eed-4a64-b356-4d949eb60a7e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1186.969331] env[69992]: DEBUG nova.compute.manager [req-5e93db2c-058c-4915-9025-ba3770fa0491 req-52619950-6bd0-462b-a8a8-67d578658a6b service nova] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Refreshing instance network info cache due to event network-changed-7feb60bd-3eed-4a64-b356-4d949eb60a7e. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1186.969508] env[69992]: DEBUG oslo_concurrency.lockutils [req-5e93db2c-058c-4915-9025-ba3770fa0491 req-52619950-6bd0-462b-a8a8-67d578658a6b service nova] Acquiring lock "refresh_cache-a35dd590-b5ff-4878-8aa5-8797814d8779" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.969638] env[69992]: DEBUG oslo_concurrency.lockutils [req-5e93db2c-058c-4915-9025-ba3770fa0491 req-52619950-6bd0-462b-a8a8-67d578658a6b service nova] Acquired lock "refresh_cache-a35dd590-b5ff-4878-8aa5-8797814d8779" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1186.969795] env[69992]: DEBUG nova.network.neutron [req-5e93db2c-058c-4915-9025-ba3770fa0491 req-52619950-6bd0-462b-a8a8-67d578658a6b service nova] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Refreshing network info cache for port 7feb60bd-3eed-4a64-b356-4d949eb60a7e {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1186.977207] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfa02ea-d6c1-4bf6-8359-49e5f1ee9cdc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.995213] env[69992]: DEBUG nova.compute.provider_tree [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1187.010546] env[69992]: DEBUG nova.network.neutron [-] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1187.077860] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897500, 'name': CopyVirtualDisk_Task} progress is 38%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.244387] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-599c58f7-ac3f-4247-8029-1b991dc5ca54 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.254576] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c692c6-618b-4466-b3af-e3f841e967e5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.292997] env[69992]: DEBUG nova.compute.manager [req-21f111fe-ea26-4811-9809-0c944d3dcaac req-9fa5c387-d55c-4c72-9cda-190e9c06d24a service nova] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Detach interface failed, port_id=abab8d85-8633-4722-85d1-b21be464919d, reason: Instance 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6 could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1187.301680] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "refresh_cache-a35dd590-b5ff-4878-8aa5-8797814d8779" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.348330] env[69992]: DEBUG oslo_vmware.api [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897504, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.503232] env[69992]: DEBUG nova.network.neutron [req-5e93db2c-058c-4915-9025-ba3770fa0491 req-52619950-6bd0-462b-a8a8-67d578658a6b service nova] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1187.514304] env[69992]: INFO nova.compute.manager [-] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Took 1.53 seconds to deallocate network for instance. 
[ 1187.530653] env[69992]: DEBUG nova.scheduler.client.report [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 115 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1187.530956] env[69992]: DEBUG nova.compute.provider_tree [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 115 to 116 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1187.531192] env[69992]: DEBUG nova.compute.provider_tree [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1187.577649] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897500, 'name': CopyVirtualDisk_Task} progress is 60%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.587151] env[69992]: DEBUG nova.network.neutron [req-5e93db2c-058c-4915-9025-ba3770fa0491 req-52619950-6bd0-462b-a8a8-67d578658a6b service nova] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1187.850496] env[69992]: DEBUG oslo_vmware.api [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897504, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.022211] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.037907] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.805s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.037907] env[69992]: DEBUG nova.compute.manager [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1188.041194] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.159s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.041447] env[69992]: DEBUG nova.objects.instance [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Lazy-loading 'resources' on Instance uuid b7a1b9e1-4d57-435f-bdb6-51481968aacb {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1188.078328] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897500, 'name': CopyVirtualDisk_Task} progress is 80%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.089969] env[69992]: DEBUG oslo_concurrency.lockutils [req-5e93db2c-058c-4915-9025-ba3770fa0491 req-52619950-6bd0-462b-a8a8-67d578658a6b service nova] Releasing lock "refresh_cache-a35dd590-b5ff-4878-8aa5-8797814d8779" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1188.091037] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquired lock "refresh_cache-a35dd590-b5ff-4878-8aa5-8797814d8779" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1188.091037] env[69992]: DEBUG nova.network.neutron [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1188.350739] env[69992]: DEBUG oslo_vmware.api [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897504, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.547910] env[69992]: DEBUG nova.compute.utils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1188.549509] env[69992]: DEBUG nova.compute.manager [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1188.549699] env[69992]: DEBUG nova.network.neutron [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1188.580754] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897500, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.444072} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.580754] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/10b701c1-9a32-4c7e-a195-4676726c8b8e/10b701c1-9a32-4c7e-a195-4676726c8b8e.vmdk to [datastore2] 408de352-797c-40c2-86bc-359e01c5c04e/408de352-797c-40c2-86bc-359e01c5c04e.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1188.583567] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f1e66e-0cd8-48f7-8056-2465322b586b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.607219] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] 408de352-797c-40c2-86bc-359e01c5c04e/408de352-797c-40c2-86bc-359e01c5c04e.vmdk or device None with type streamOptimized {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1188.608603] env[69992]: DEBUG nova.policy [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94516a830322454c90c08043118e547a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6090a2d03daf46e9b687d24fde64fb72', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1188.611785] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c74fb76-dd16-4520-8652-cb34a8898489 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.631139] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1188.631139] env[69992]: value = "task-2897505" [ 1188.631139] env[69992]: _type = "Task" [ 1188.631139] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.639933] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897505, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.651520] env[69992]: DEBUG nova.network.neutron [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1188.849669] env[69992]: DEBUG oslo_vmware.api [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897504, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.687135} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.850135] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1188.850402] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1188.850608] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1188.850898] env[69992]: INFO nova.compute.manager [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Took 2.65 seconds to destroy the instance on the hypervisor. [ 1188.854173] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1188.854173] env[69992]: DEBUG nova.compute.manager [-] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1188.854173] env[69992]: DEBUG nova.network.neutron [-] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1188.919829] env[69992]: DEBUG nova.network.neutron [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Successfully created port: 236e1657-89f3-43f3-9baf-d126ebdaac2e {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1189.041030] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c0d601-7e3b-4b75-a3fa-a4c416c6ea09 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.049412] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f2b987-ecd9-4adf-9095-c90c6de6aac0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.055107] env[69992]: DEBUG nova.compute.manager [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1189.105918] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbef8f7a-bf47-437d-b33f-333e5bbc0d0c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.113833] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d754662-6749-44ea-b505-24c5e39bceaf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.120290] env[69992]: DEBUG nova.network.neutron [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Updating instance_info_cache with network_info: [{"id": "7feb60bd-3eed-4a64-b356-4d949eb60a7e", "address": "fa:16:3e:61:58:09", "network": {"id": "5acf28f9-5cff-441b-b890-520965c956f3", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1147240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6090a2d03daf46e9b687d24fde64fb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7feb60bd-3e", "ovs_interfaceid": "7feb60bd-3eed-4a64-b356-4d949eb60a7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1189.132756] env[69992]: DEBUG nova.compute.provider_tree [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1189.146808] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897505, 'name': ReconfigVM_Task, 'duration_secs': 0.302792} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.147305] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Reconfigured VM instance instance-00000048 to attach disk [datastore2] 408de352-797c-40c2-86bc-359e01c5c04e/408de352-797c-40c2-86bc-359e01c5c04e.vmdk or device None with type streamOptimized {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1189.148122] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f1d2b7b2-c704-4edb-976f-b6fb34c2a43d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.155252] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1189.155252] env[69992]: value = "task-2897506" [ 1189.155252] env[69992]: _type = "Task" [ 1189.155252] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.166226] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897506, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.344030] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-824cc91d-0dc6-4fa3-b1a6-af92aefd9112 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Volume attach. 
Driver type: vmdk {{(pid=69992) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1189.344700] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-824cc91d-0dc6-4fa3-b1a6-af92aefd9112 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582033', 'volume_id': '78d5f802-a2b3-4c3d-9484-3ea2397e9ab5', 'name': 'volume-78d5f802-a2b3-4c3d-9484-3ea2397e9ab5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1b4da2ab-d026-45d8-8234-79ddd84d5cbb', 'attached_at': '', 'detached_at': '', 'volume_id': '78d5f802-a2b3-4c3d-9484-3ea2397e9ab5', 'serial': '78d5f802-a2b3-4c3d-9484-3ea2397e9ab5'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1189.345918] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-560705fc-aa0e-425e-85e4-3ce9f641d319 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.367464] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eee0585-b2d4-4418-a2c0-1fb1917f2709 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.393532] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-824cc91d-0dc6-4fa3-b1a6-af92aefd9112 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] volume-78d5f802-a2b3-4c3d-9484-3ea2397e9ab5/volume-78d5f802-a2b3-4c3d-9484-3ea2397e9ab5.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1189.393853] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03121629-2f70-4a8d-bb44-381aded6cec5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.415417] env[69992]: DEBUG oslo_vmware.api [None req-824cc91d-0dc6-4fa3-b1a6-af92aefd9112 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1189.415417] env[69992]: value = "task-2897507" [ 1189.415417] env[69992]: _type = "Task" [ 1189.415417] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.427105] env[69992]: DEBUG oslo_vmware.api [None req-824cc91d-0dc6-4fa3-b1a6-af92aefd9112 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897507, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.586605] env[69992]: DEBUG nova.compute.manager [req-66ec5a61-7f0f-44e2-8b45-2f4fdf84f59e req-b3d70c87-e596-487d-b517-b57f79666e32 service nova] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Received event network-vif-deleted-b39fa912-b02a-4764-8cc8-f79e08d575c6 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1189.586805] env[69992]: INFO nova.compute.manager [req-66ec5a61-7f0f-44e2-8b45-2f4fdf84f59e req-b3d70c87-e596-487d-b517-b57f79666e32 service nova] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Neutron deleted interface b39fa912-b02a-4764-8cc8-f79e08d575c6; detaching it from the instance and deleting it from the info cache [ 1189.586975] env[69992]: DEBUG nova.network.neutron [req-66ec5a61-7f0f-44e2-8b45-2f4fdf84f59e req-b3d70c87-e596-487d-b517-b57f79666e32 service nova] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1189.622076] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Releasing lock "refresh_cache-a35dd590-b5ff-4878-8aa5-8797814d8779" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1189.622410] env[69992]: DEBUG nova.compute.manager [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Instance network_info: |[{"id": "7feb60bd-3eed-4a64-b356-4d949eb60a7e", "address": "fa:16:3e:61:58:09", "network": {"id": "5acf28f9-5cff-441b-b890-520965c956f3", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1147240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6090a2d03daf46e9b687d24fde64fb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7feb60bd-3e", "ovs_interfaceid": "7feb60bd-3eed-4a64-b356-4d949eb60a7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1189.623050] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:58:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a91c3a96-63d0-407c-bcde-c3d5b58d9cb2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'7feb60bd-3eed-4a64-b356-4d949eb60a7e', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1189.630444] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1189.630907] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1189.631158] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4ddae86-40f9-45b6-b1db-24d101cbd7ea {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.650587] env[69992]: DEBUG nova.scheduler.client.report [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1189.660354] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1189.660354] env[69992]: value = "task-2897508" [ 1189.660354] env[69992]: _type = "Task" [ 1189.660354] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.666784] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897506, 'name': Rename_Task} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.667984] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1189.668254] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b5218a1-b62c-47b1-b5ff-b06ad6a82df4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.674760] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897508, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.682237] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1189.682237] env[69992]: value = "task-2897509" [ 1189.682237] env[69992]: _type = "Task" [ 1189.682237] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.691748] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897509, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.926292] env[69992]: DEBUG oslo_vmware.api [None req-824cc91d-0dc6-4fa3-b1a6-af92aefd9112 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897507, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.040444] env[69992]: DEBUG nova.network.neutron [-] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.068936] env[69992]: DEBUG nova.compute.manager [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1190.092762] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e3ca42d0-f096-4d75-96a0-515e2bace518 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.098516] env[69992]: DEBUG nova.virt.hardware [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1190.098870] env[69992]: DEBUG nova.virt.hardware [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1190.099095] env[69992]: DEBUG nova.virt.hardware [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1190.099393] env[69992]: DEBUG nova.virt.hardware [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1190.099638] env[69992]: DEBUG nova.virt.hardware [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1190.099936] env[69992]: DEBUG nova.virt.hardware [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1190.100186] env[69992]: DEBUG nova.virt.hardware [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1190.100389] env[69992]: DEBUG nova.virt.hardware [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be 
tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1190.100640] env[69992]: DEBUG nova.virt.hardware [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1190.100901] env[69992]: DEBUG nova.virt.hardware [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1190.101119] env[69992]: DEBUG nova.virt.hardware [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1190.102110] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0412fab9-01bc-4c09-b699-711f2ece3ccc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.110655] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae72a7d5-569d-4856-85e4-a36be29f336b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.126719] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9798f539-d233-4c3a-86d9-02e3fd3b7377 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.153703] env[69992]: DEBUG nova.compute.manager [req-66ec5a61-7f0f-44e2-8b45-2f4fdf84f59e req-b3d70c87-e596-487d-b517-b57f79666e32 service nova] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Detach interface failed, port_id=b39fa912-b02a-4764-8cc8-f79e08d575c6, reason: Instance a7f01cd7-f148-48fc-a71a-5461672d6039 could not be found. 
{{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1190.156809] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.116s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.159108] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.572s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.159348] env[69992]: DEBUG nova.objects.instance [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lazy-loading 'resources' on Instance uuid bce01d14-3c1b-4dce-b61c-721e25a56497 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1190.175668] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897508, 'name': CreateVM_Task, 'duration_secs': 0.476371} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.176836] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1190.177750] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.178091] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1190.179777] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1190.179777] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62cc3094-139f-4cb1-a928-f8898d3af755 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.190984] env[69992]: INFO nova.scheduler.client.report [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Deleted allocations for instance 
b7a1b9e1-4d57-435f-bdb6-51481968aacb [ 1190.196866] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1190.196866] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d14279-513b-8c9a-4caf-4307511e4dba" [ 1190.196866] env[69992]: _type = "Task" [ 1190.196866] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.207137] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897509, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.214254] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d14279-513b-8c9a-4caf-4307511e4dba, 'name': SearchDatastore_Task, 'duration_secs': 0.022196} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.214634] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1190.214956] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1190.215304] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.215614] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1190.215886] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1190.216243] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-0d3daad5-a9bc-4cba-8b58-55dff9c45990 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.228279] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1190.228566] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1190.229868] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d3e8a2c-9118-4b82-9313-b9f792db823a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.237419] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1190.237419] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52ec0e6c-aa5d-6200-29ec-2afee5677670" [ 1190.237419] env[69992]: _type = "Task" [ 1190.237419] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.246366] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ec0e6c-aa5d-6200-29ec-2afee5677670, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.426676] env[69992]: DEBUG oslo_vmware.api [None req-824cc91d-0dc6-4fa3-b1a6-af92aefd9112 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897507, 'name': ReconfigVM_Task, 'duration_secs': 0.694453} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.427570] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-824cc91d-0dc6-4fa3-b1a6-af92aefd9112 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Reconfigured VM instance instance-00000037 to attach disk [datastore2] volume-78d5f802-a2b3-4c3d-9484-3ea2397e9ab5/volume-78d5f802-a2b3-4c3d-9484-3ea2397e9ab5.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1190.432163] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36874f61-bb48-405b-8dd3-fed0cf91edcd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.450310] env[69992]: DEBUG oslo_vmware.api [None req-824cc91d-0dc6-4fa3-b1a6-af92aefd9112 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1190.450310] env[69992]: value = "task-2897510" [ 1190.450310] env[69992]: _type = "Task" [ 1190.450310] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.458378] env[69992]: DEBUG oslo_vmware.api [None req-824cc91d-0dc6-4fa3-b1a6-af92aefd9112 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897510, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.544449] env[69992]: INFO nova.compute.manager [-] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Took 1.69 seconds to deallocate network for instance. [ 1190.708141] env[69992]: DEBUG oslo_vmware.api [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897509, 'name': PowerOnVM_Task, 'duration_secs': 0.599496} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.708611] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7d50e91e-df61-4b05-97fd-613215f17d6c tempest-ServersAdminNegativeTestJSON-2035378228 tempest-ServersAdminNegativeTestJSON-2035378228-project-member] Lock "b7a1b9e1-4d57-435f-bdb6-51481968aacb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.420s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.709495] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1190.710188] env[69992]: INFO nova.compute.manager [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Took 16.22 seconds to spawn the instance on the hypervisor. 
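[annotation] The recurring "Waiting for the task: (returnval){ value = task-... }", "Task: {...} progress is N%" and "completed successfully" entries above come from oslo.vmware's task-polling helpers (wait_for_task at oslo_vmware/api.py:397 and _poll_task at api.py:434 in this trace). As a minimal illustrative sketch only, not code from this log: the snippet below shows the invoke-then-wait pattern those entries reflect, assuming an already reachable vCenter; VC_HOST, VC_USER, VC_PASS and vm_ref are placeholders, not values taken from this environment.

    # Hypothetical sketch of the oslo.vmware task pattern behind the log lines above.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        VC_HOST, VC_USER, VC_PASS,      # placeholder connection details
        10,                             # api_retry_count
        0.5,                            # task_poll_interval: how often the task is polled
    )

    # Kick off an asynchronous vSphere task (here: powering on a VM)...
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # ...then block until it completes. wait_for_task() repeatedly polls the
    # task object (the "progress is N%" DEBUG lines) and returns once vCenter
    # reports success, or raises if the task ends in an error state.
    session.wait_for_task(task)

The CopyVirtualDisk_Task, ReconfigVM_Task and CreateVM_Task entries elsewhere in this section follow the same pattern, driven through nova.virt.vmwareapi.vm_util and volumeops.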
[ 1190.710188] env[69992]: DEBUG nova.compute.manager [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1190.711110] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d035b77-aa2c-41ef-8897-cdcf61155e67 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.752386] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ec0e6c-aa5d-6200-29ec-2afee5677670, 'name': SearchDatastore_Task, 'duration_secs': 0.009598} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.753801] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e225a2e7-bd4c-4ff9-b511-5637edc1818b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.762783] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1190.762783] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]526758cf-c51e-5ed2-0392-711df25ca706" [ 1190.762783] env[69992]: _type = "Task" [ 1190.762783] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.774615] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526758cf-c51e-5ed2-0392-711df25ca706, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.861776] env[69992]: DEBUG nova.compute.manager [req-5a5c4695-0118-4eaa-8ad7-7e98b6984f80 req-fa2972ad-a0db-403d-b808-3fa2bfd44005 service nova] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Received event network-vif-plugged-236e1657-89f3-43f3-9baf-d126ebdaac2e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1190.862392] env[69992]: DEBUG oslo_concurrency.lockutils [req-5a5c4695-0118-4eaa-8ad7-7e98b6984f80 req-fa2972ad-a0db-403d-b808-3fa2bfd44005 service nova] Acquiring lock "033d667f-5511-4254-a7e2-f8a2a94178d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.862392] env[69992]: DEBUG oslo_concurrency.lockutils [req-5a5c4695-0118-4eaa-8ad7-7e98b6984f80 req-fa2972ad-a0db-403d-b808-3fa2bfd44005 service nova] Lock "033d667f-5511-4254-a7e2-f8a2a94178d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.862638] env[69992]: DEBUG oslo_concurrency.lockutils [req-5a5c4695-0118-4eaa-8ad7-7e98b6984f80 req-fa2972ad-a0db-403d-b808-3fa2bfd44005 service nova] Lock "033d667f-5511-4254-a7e2-f8a2a94178d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.862879] env[69992]: DEBUG nova.compute.manager [req-5a5c4695-0118-4eaa-8ad7-7e98b6984f80 req-fa2972ad-a0db-403d-b808-3fa2bfd44005 service nova] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] No waiting events found dispatching network-vif-plugged-236e1657-89f3-43f3-9baf-d126ebdaac2e {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1190.863149] env[69992]: WARNING nova.compute.manager [req-5a5c4695-0118-4eaa-8ad7-7e98b6984f80 req-fa2972ad-a0db-403d-b808-3fa2bfd44005 service nova] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Received unexpected event network-vif-plugged-236e1657-89f3-43f3-9baf-d126ebdaac2e for instance with vm_state building and task_state spawning. [ 1190.961952] env[69992]: DEBUG oslo_vmware.api [None req-824cc91d-0dc6-4fa3-b1a6-af92aefd9112 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897510, 'name': ReconfigVM_Task, 'duration_secs': 0.224008} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.962287] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-824cc91d-0dc6-4fa3-b1a6-af92aefd9112 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582033', 'volume_id': '78d5f802-a2b3-4c3d-9484-3ea2397e9ab5', 'name': 'volume-78d5f802-a2b3-4c3d-9484-3ea2397e9ab5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1b4da2ab-d026-45d8-8234-79ddd84d5cbb', 'attached_at': '', 'detached_at': '', 'volume_id': '78d5f802-a2b3-4c3d-9484-3ea2397e9ab5', 'serial': '78d5f802-a2b3-4c3d-9484-3ea2397e9ab5'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1191.052794] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1191.106674] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ee9421-48d8-4dea-9f5c-5ad2270d1ffe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.115054] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a921c68-25e9-43d8-ab13-c5f423b9fe99 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.144451] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b58ec323-cd25-4845-9b28-5565d5fa28c8 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1191.144702] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b58ec323-cd25-4845-9b28-5565d5fa28c8 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1191.146697] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54808835-225f-4f59-a506-cb409063c48b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.154363] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef2ddb7-020c-42c8-abfe-7817be191112 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.170734] env[69992]: DEBUG nova.compute.provider_tree [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 
tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1191.235051] env[69992]: INFO nova.compute.manager [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Took 49.53 seconds to build instance. [ 1191.274763] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526758cf-c51e-5ed2-0392-711df25ca706, 'name': SearchDatastore_Task, 'duration_secs': 0.038795} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.275065] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1191.275343] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] a35dd590-b5ff-4878-8aa5-8797814d8779/a35dd590-b5ff-4878-8aa5-8797814d8779.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1191.275640] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-779b6693-8e3d-4dd4-b0f4-b91643d8dec0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.282713] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1191.282713] env[69992]: value = "task-2897511" [ 1191.282713] env[69992]: _type = "Task" [ 1191.282713] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.293558] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897511, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.294542] env[69992]: DEBUG nova.network.neutron [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Successfully updated port: 236e1657-89f3-43f3-9baf-d126ebdaac2e {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1191.643183] env[69992]: DEBUG nova.compute.manager [req-f1459ec1-0612-4fa4-9588-7484b5e72a16 req-7b547925-02af-4357-bd03-ddb6af15128f service nova] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Received event network-changed-236e1657-89f3-43f3-9baf-d126ebdaac2e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1191.643183] env[69992]: DEBUG nova.compute.manager [req-f1459ec1-0612-4fa4-9588-7484b5e72a16 req-7b547925-02af-4357-bd03-ddb6af15128f service nova] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Refreshing instance network info cache due to event network-changed-236e1657-89f3-43f3-9baf-d126ebdaac2e. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1191.643183] env[69992]: DEBUG oslo_concurrency.lockutils [req-f1459ec1-0612-4fa4-9588-7484b5e72a16 req-7b547925-02af-4357-bd03-ddb6af15128f service nova] Acquiring lock "refresh_cache-033d667f-5511-4254-a7e2-f8a2a94178d1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.643183] env[69992]: DEBUG oslo_concurrency.lockutils [req-f1459ec1-0612-4fa4-9588-7484b5e72a16 req-7b547925-02af-4357-bd03-ddb6af15128f service nova] Acquired lock "refresh_cache-033d667f-5511-4254-a7e2-f8a2a94178d1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1191.643183] env[69992]: DEBUG nova.network.neutron [req-f1459ec1-0612-4fa4-9588-7484b5e72a16 req-7b547925-02af-4357-bd03-ddb6af15128f service nova] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Refreshing network info cache for port 236e1657-89f3-43f3-9baf-d126ebdaac2e {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1191.650309] env[69992]: INFO nova.compute.manager [None req-b58ec323-cd25-4845-9b28-5565d5fa28c8 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Detaching volume 964072e4-b1a4-47ae-8221-dfb900c2f8b1 [ 1191.674186] env[69992]: DEBUG nova.scheduler.client.report [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1191.691548] env[69992]: INFO nova.virt.block_device [None req-b58ec323-cd25-4845-9b28-5565d5fa28c8 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 
dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Attempting to driver detach volume 964072e4-b1a4-47ae-8221-dfb900c2f8b1 from mountpoint /dev/sdb [ 1191.691791] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b58ec323-cd25-4845-9b28-5565d5fa28c8 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Volume detach. Driver type: vmdk {{(pid=69992) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1191.692127] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b58ec323-cd25-4845-9b28-5565d5fa28c8 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581990', 'volume_id': '964072e4-b1a4-47ae-8221-dfb900c2f8b1', 'name': 'volume-964072e4-b1a4-47ae-8221-dfb900c2f8b1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'dd31269e-716c-44cd-9fc3-ce227fe5b3b2', 'attached_at': '', 'detached_at': '', 'volume_id': '964072e4-b1a4-47ae-8221-dfb900c2f8b1', 'serial': '964072e4-b1a4-47ae-8221-dfb900c2f8b1'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1191.692976] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8efad593-e903-4c3f-a489-1c3dffb9f274 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.720853] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b656ea-37d9-4d15-9a48-c31d748ef9cb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.728346] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7fde541-faf5-4668-be27-3acbe36f558d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.753775] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a9c3aa7e-2266-42c6-ad0c-e0fdad71579b tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "408de352-797c-40c2-86bc-359e01c5c04e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.060s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.755168] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eebac75e-f820-459a-b51d-af714ad585dd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.778721] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b58ec323-cd25-4845-9b28-5565d5fa28c8 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] The volume has not been displaced from its original location: [datastore1] volume-964072e4-b1a4-47ae-8221-dfb900c2f8b1/volume-964072e4-b1a4-47ae-8221-dfb900c2f8b1.vmdk. No consolidation needed. 
{{(pid=69992) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1191.784618] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b58ec323-cd25-4845-9b28-5565d5fa28c8 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Reconfiguring VM instance instance-0000001f to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1191.785845] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d248e59-67c6-4f6e-8ab3-38b2cfa3f289 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.806875] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "refresh_cache-033d667f-5511-4254-a7e2-f8a2a94178d1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.812448] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897511, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.814363] env[69992]: DEBUG oslo_vmware.api [None req-b58ec323-cd25-4845-9b28-5565d5fa28c8 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1191.814363] env[69992]: value = "task-2897512" [ 1191.814363] env[69992]: _type = "Task" [ 1191.814363] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.829586] env[69992]: DEBUG oslo_vmware.api [None req-b58ec323-cd25-4845-9b28-5565d5fa28c8 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897512, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.003480] env[69992]: DEBUG nova.objects.instance [None req-824cc91d-0dc6-4fa3-b1a6-af92aefd9112 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lazy-loading 'flavor' on Instance uuid 1b4da2ab-d026-45d8-8234-79ddd84d5cbb {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1192.179772] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.021s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.183019] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.003s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.183019] env[69992]: DEBUG nova.objects.instance [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Lazy-loading 'resources' on Instance uuid 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1192.198915] env[69992]: DEBUG nova.network.neutron [req-f1459ec1-0612-4fa4-9588-7484b5e72a16 req-7b547925-02af-4357-bd03-ddb6af15128f service nova] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1192.206427] env[69992]: INFO nova.scheduler.client.report [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Deleted allocations for instance bce01d14-3c1b-4dce-b61c-721e25a56497 [ 1192.264052] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "408de352-797c-40c2-86bc-359e01c5c04e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.264331] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "408de352-797c-40c2-86bc-359e01c5c04e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.264538] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "408de352-797c-40c2-86bc-359e01c5c04e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.264718] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "408de352-797c-40c2-86bc-359e01c5c04e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.264906] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "408de352-797c-40c2-86bc-359e01c5c04e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.267153] env[69992]: INFO nova.compute.manager [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Terminating instance [ 1192.293351] env[69992]: DEBUG nova.network.neutron [req-f1459ec1-0612-4fa4-9588-7484b5e72a16 req-7b547925-02af-4357-bd03-ddb6af15128f service nova] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.298063] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897511, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.580813} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.298649] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] a35dd590-b5ff-4878-8aa5-8797814d8779/a35dd590-b5ff-4878-8aa5-8797814d8779.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1192.298878] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1192.299158] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d1b5aa33-3b50-42e5-9104-1aa6a23ea044 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.306815] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1192.306815] env[69992]: value = "task-2897513" [ 1192.306815] env[69992]: _type = "Task" [ 1192.306815] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.316034] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897513, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.324724] env[69992]: DEBUG oslo_vmware.api [None req-b58ec323-cd25-4845-9b28-5565d5fa28c8 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897512, 'name': ReconfigVM_Task, 'duration_secs': 0.238537} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.325060] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b58ec323-cd25-4845-9b28-5565d5fa28c8 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Reconfigured VM instance instance-0000001f to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1192.330108] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8fbb6217-f2be-40ff-bd95-4cb4853e2136 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.345906] env[69992]: DEBUG oslo_vmware.api [None req-b58ec323-cd25-4845-9b28-5565d5fa28c8 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1192.345906] env[69992]: value = "task-2897514" [ 1192.345906] env[69992]: _type = "Task" [ 1192.345906] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.354545] env[69992]: DEBUG oslo_vmware.api [None req-b58ec323-cd25-4845-9b28-5565d5fa28c8 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897514, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.509840] env[69992]: DEBUG oslo_concurrency.lockutils [None req-824cc91d-0dc6-4fa3-b1a6-af92aefd9112 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "1b4da2ab-d026-45d8-8234-79ddd84d5cbb" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.311s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.682900] env[69992]: INFO nova.compute.manager [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Rescuing [ 1192.683160] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "refresh_cache-1b4da2ab-d026-45d8-8234-79ddd84d5cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.683343] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquired lock "refresh_cache-1b4da2ab-d026-45d8-8234-79ddd84d5cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1192.683513] env[69992]: DEBUG nova.network.neutron [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1192.714862] 
env[69992]: DEBUG oslo_concurrency.lockutils [None req-dc32f1b3-35bd-415f-9c59-b0e642a211d9 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "bce01d14-3c1b-4dce-b61c-721e25a56497" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.565s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.777081] env[69992]: DEBUG nova.compute.manager [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1192.777885] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1192.779355] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0762398-0955-4bb0-9fed-b6e818a66244 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.791101] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1192.791101] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b71e6858-bd37-451d-8c92-c22ac213f98e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.799945] env[69992]: DEBUG oslo_concurrency.lockutils [req-f1459ec1-0612-4fa4-9588-7484b5e72a16 req-7b547925-02af-4357-bd03-ddb6af15128f service nova] Releasing lock "refresh_cache-033d667f-5511-4254-a7e2-f8a2a94178d1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1192.800113] env[69992]: DEBUG oslo_vmware.api [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1192.800113] env[69992]: value = "task-2897515" [ 1192.800113] env[69992]: _type = "Task" [ 1192.800113] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.801012] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquired lock "refresh_cache-033d667f-5511-4254-a7e2-f8a2a94178d1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1192.801012] env[69992]: DEBUG nova.network.neutron [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1192.819570] env[69992]: DEBUG oslo_vmware.api [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897515, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.826019] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897513, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066573} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.826019] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1192.826019] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edff7d9f-1545-404d-87a8-87fba78bf017 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.849358] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] a35dd590-b5ff-4878-8aa5-8797814d8779/a35dd590-b5ff-4878-8aa5-8797814d8779.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1192.855257] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e62a9ce-763f-457a-adc1-e20953ca3f36 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.883311] env[69992]: DEBUG oslo_vmware.api [None req-b58ec323-cd25-4845-9b28-5565d5fa28c8 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897514, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.887197] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1192.887197] env[69992]: value = "task-2897516" [ 1192.887197] env[69992]: _type = "Task" [ 1192.887197] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.895818] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897516, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.184792] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc14e7ed-bcdd-4ffa-92e6-36a9b9070e41 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.196327] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb1faa2-e208-497b-bf51-163e671dbc25 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.252159] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50cbb215-380f-4883-acb9-33f9f6cc479d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.260484] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c302abf5-32a4-43a9-a142-2fe7ade0ae95 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.276177] env[69992]: DEBUG nova.compute.provider_tree [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1193.310643] env[69992]: DEBUG oslo_vmware.api [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897515, 'name': PowerOffVM_Task, 'duration_secs': 0.19499} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.311256] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1193.311432] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1193.311681] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1e5c5c3b-7848-4746-8e91-060d50e591c0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.363579] env[69992]: DEBUG oslo_vmware.api [None req-b58ec323-cd25-4845-9b28-5565d5fa28c8 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897514, 'name': ReconfigVM_Task, 'duration_secs': 0.809093} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.365157] env[69992]: DEBUG nova.network.neutron [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1193.369056] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b58ec323-cd25-4845-9b28-5565d5fa28c8 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581990', 'volume_id': '964072e4-b1a4-47ae-8221-dfb900c2f8b1', 'name': 'volume-964072e4-b1a4-47ae-8221-dfb900c2f8b1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'dd31269e-716c-44cd-9fc3-ce227fe5b3b2', 'attached_at': '', 'detached_at': '', 'volume_id': '964072e4-b1a4-47ae-8221-dfb900c2f8b1', 'serial': '964072e4-b1a4-47ae-8221-dfb900c2f8b1'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1193.376278] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1193.376278] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1193.376278] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 
tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Deleting the datastore file [datastore2] 408de352-797c-40c2-86bc-359e01c5c04e {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1193.376278] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58b1d367-aec1-467a-8fe3-8d849776370b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.382372] env[69992]: DEBUG oslo_vmware.api [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1193.382372] env[69992]: value = "task-2897518" [ 1193.382372] env[69992]: _type = "Task" [ 1193.382372] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.393207] env[69992]: DEBUG oslo_vmware.api [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897518, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.403617] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897516, 'name': ReconfigVM_Task, 'duration_secs': 0.319724} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.404305] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Reconfigured VM instance instance-00000049 to attach disk [datastore2] a35dd590-b5ff-4878-8aa5-8797814d8779/a35dd590-b5ff-4878-8aa5-8797814d8779.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1193.405342] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5269fd7a-cdd7-483e-badd-ef4cbeaf9907 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.413616] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1193.413616] env[69992]: value = "task-2897519" [ 1193.413616] env[69992]: _type = "Task" [ 1193.413616] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.422842] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897519, 'name': Rename_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.512361] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "953c0e0d-3279-444c-b631-6ebbf24e5487" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.512618] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "953c0e0d-3279-444c-b631-6ebbf24e5487" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.718424] env[69992]: DEBUG nova.network.neutron [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Updating instance_info_cache with network_info: [{"id": "236e1657-89f3-43f3-9baf-d126ebdaac2e", "address": "fa:16:3e:39:5d:17", "network": {"id": "5acf28f9-5cff-441b-b890-520965c956f3", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1147240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6090a2d03daf46e9b687d24fde64fb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap236e1657-89", "ovs_interfaceid": "236e1657-89f3-43f3-9baf-d126ebdaac2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1193.730830] env[69992]: DEBUG nova.network.neutron [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Updating instance_info_cache with network_info: [{"id": "789f6123-167b-48dd-ae68-cfdbc1d5c78a", "address": "fa:16:3e:ed:f2:3c", "network": {"id": "e710c9b1-06e9-45f1-88fe-251001c4f54f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1116812669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "8217315011854468b0cc17c4dfe342f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap789f6123-16", "ovs_interfaceid": "789f6123-167b-48dd-ae68-cfdbc1d5c78a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1193.779154] env[69992]: DEBUG nova.scheduler.client.report [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1193.892760] env[69992]: DEBUG oslo_vmware.api [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897518, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155457} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.893545] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1193.894068] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1193.894068] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1193.894218] env[69992]: INFO nova.compute.manager [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1193.896225] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1193.896225] env[69992]: DEBUG nova.compute.manager [-] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1193.896225] env[69992]: DEBUG nova.network.neutron [-] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1193.924158] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897519, 'name': Rename_Task, 'duration_secs': 0.150426} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.924884] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1193.926016] env[69992]: DEBUG nova.objects.instance [None req-b58ec323-cd25-4845-9b28-5565d5fa28c8 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lazy-loading 'flavor' on Instance uuid dd31269e-716c-44cd-9fc3-ce227fe5b3b2 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1193.927257] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-758189fd-dcb9-43d3-9cae-3472491099ee {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.933300] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1193.933300] env[69992]: value = "task-2897520" [ 1193.933300] env[69992]: _type = "Task" [ 1193.933300] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.940895] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897520, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.014812] env[69992]: DEBUG nova.compute.manager [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1194.221189] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Releasing lock "refresh_cache-033d667f-5511-4254-a7e2-f8a2a94178d1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1194.221522] env[69992]: DEBUG nova.compute.manager [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Instance network_info: |[{"id": "236e1657-89f3-43f3-9baf-d126ebdaac2e", "address": "fa:16:3e:39:5d:17", "network": {"id": "5acf28f9-5cff-441b-b890-520965c956f3", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1147240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6090a2d03daf46e9b687d24fde64fb72", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap236e1657-89", "ovs_interfaceid": "236e1657-89f3-43f3-9baf-d126ebdaac2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1194.221960] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:5d:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a91c3a96-63d0-407c-bcde-c3d5b58d9cb2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '236e1657-89f3-43f3-9baf-d126ebdaac2e', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1194.232871] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1194.233912] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Releasing lock "refresh_cache-1b4da2ab-d026-45d8-8234-79ddd84d5cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1194.238503] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1194.239553] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc46ac60-5dc4-46c4-ae6c-93d6dbaada42 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.267495] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1194.267495] env[69992]: value = "task-2897521" [ 1194.267495] env[69992]: _type = "Task" [ 1194.267495] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.276492] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897521, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.284757] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.102s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.287385] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 24.712s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.287959] env[69992]: DEBUG nova.objects.instance [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69992) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1194.324592] env[69992]: INFO nova.scheduler.client.report [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Deleted allocations for instance 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e [ 1194.414178] env[69992]: DEBUG nova.compute.manager [req-78b36d39-2873-4ad6-a45e-19afd142dca0 req-d5f8017f-0ba4-4236-82ff-da1d25aec1d4 service nova] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Received event network-vif-deleted-da4c3835-de23-4fe6-804e-cfd3dc1580cc {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1194.414434] env[69992]: INFO nova.compute.manager 
[req-78b36d39-2873-4ad6-a45e-19afd142dca0 req-d5f8017f-0ba4-4236-82ff-da1d25aec1d4 service nova] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Neutron deleted interface da4c3835-de23-4fe6-804e-cfd3dc1580cc; detaching it from the instance and deleting it from the info cache [ 1194.414644] env[69992]: DEBUG nova.network.neutron [req-78b36d39-2873-4ad6-a45e-19afd142dca0 req-d5f8017f-0ba4-4236-82ff-da1d25aec1d4 service nova] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.443197] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897520, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.540389] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1194.567888] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Acquiring lock "af07ebd0-5f12-49c3-a518-95be9a8d6c82" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1194.568242] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Lock "af07ebd0-5f12-49c3-a518-95be9a8d6c82" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.568552] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Acquiring lock "af07ebd0-5f12-49c3-a518-95be9a8d6c82-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1194.569026] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Lock "af07ebd0-5f12-49c3-a518-95be9a8d6c82-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.569026] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Lock "af07ebd0-5f12-49c3-a518-95be9a8d6c82-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.572040] env[69992]: INFO nova.compute.manager [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Terminating instance [ 1194.791426] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897521, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.802847] env[69992]: DEBUG nova.network.neutron [-] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.837290] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b2109fdf-7f0a-4a12-8744-89658b8f3d87 tempest-ServersWithSpecificFlavorTestJSON-1381962467 tempest-ServersWithSpecificFlavorTestJSON-1381962467-project-member] Lock "4cd9fb91-44f1-4304-a2bf-c8b294b19e0e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.729s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.920132] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-44c28236-ed24-4054-b28e-dc38fa2d8b70 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.933913] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b58ec323-cd25-4845-9b28-5565d5fa28c8 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.789s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.939486] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66de0ff2-7206-42a0-ac08-56fad48c3125 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.968822] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897520, 'name': PowerOnVM_Task, 'duration_secs': 0.65268} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.969040] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1194.969253] env[69992]: INFO nova.compute.manager [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Took 8.70 seconds to spawn the instance on the hypervisor. 
[ 1194.969438] env[69992]: DEBUG nova.compute.manager [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1194.970294] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e923d89-b2a1-4ad0-9c5b-c16329c57b29 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.994092] env[69992]: DEBUG nova.compute.manager [req-78b36d39-2873-4ad6-a45e-19afd142dca0 req-d5f8017f-0ba4-4236-82ff-da1d25aec1d4 service nova] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Detach interface failed, port_id=da4c3835-de23-4fe6-804e-cfd3dc1580cc, reason: Instance 408de352-797c-40c2-86bc-359e01c5c04e could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1195.076952] env[69992]: DEBUG nova.compute.manager [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1195.077829] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1195.077829] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-90de8647-d88b-4bb8-a357-31e43f9c2e7d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.086543] env[69992]: DEBUG oslo_vmware.api [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Waiting for the task: (returnval){ [ 1195.086543] env[69992]: value = "task-2897522" [ 1195.086543] env[69992]: _type = "Task" [ 1195.086543] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.097903] env[69992]: DEBUG oslo_vmware.api [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Task: {'id': task-2897522, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.281577] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897521, 'name': CreateVM_Task, 'duration_secs': 0.514796} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.281577] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1195.281577] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.281577] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1195.281577] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1195.281577] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8ac4bf2-361e-4d4a-9192-dcc8f7683382 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.286586] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1195.286586] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]527e014c-7b97-f318-c34c-a43e58da5fc4" [ 1195.286586] env[69992]: _type = "Task" [ 1195.286586] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.301910] env[69992]: DEBUG oslo_concurrency.lockutils [None req-dd9824f2-08a8-40bb-a274-0bc7fd6d1fd1 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.305663] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]527e014c-7b97-f318-c34c-a43e58da5fc4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.305663] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.976s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1195.305663] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.308518] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.477s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1195.308808] env[69992]: DEBUG nova.objects.instance [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lazy-loading 'resources' on Instance uuid c1c90aa6-922d-4315-8ead-2263a55a5d6e {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1195.310782] env[69992]: INFO nova.compute.manager [-] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Took 1.42 seconds to deallocate network for instance. [ 1195.331467] env[69992]: INFO nova.scheduler.client.report [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Deleted allocations for instance 0e8163d9-6ff5-4f1e-af33-ccb42fa46750 [ 1195.507025] env[69992]: INFO nova.compute.manager [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Took 41.55 seconds to build instance. [ 1195.596452] env[69992]: DEBUG oslo_vmware.api [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Task: {'id': task-2897522, 'name': PowerOffVM_Task, 'duration_secs': 0.20241} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.596725] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1195.597010] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Volume detach. 
Driver type: vmdk {{(pid=69992) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1195.597240] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581910', 'volume_id': 'fbb68063-47ce-447c-a9bc-94fbbe5c17f4', 'name': 'volume-fbb68063-47ce-447c-a9bc-94fbbe5c17f4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'af07ebd0-5f12-49c3-a518-95be9a8d6c82', 'attached_at': '', 'detached_at': '', 'volume_id': 'fbb68063-47ce-447c-a9bc-94fbbe5c17f4', 'serial': 'fbb68063-47ce-447c-a9bc-94fbbe5c17f4'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1195.598060] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2de78db-31ea-41f0-9548-3aa3046b3080 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.618107] env[69992]: DEBUG oslo_concurrency.lockutils [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1195.618365] env[69992]: DEBUG oslo_concurrency.lockutils [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1195.618618] env[69992]: DEBUG oslo_concurrency.lockutils [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1195.619071] env[69992]: DEBUG oslo_concurrency.lockutils [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1195.619071] env[69992]: DEBUG oslo_concurrency.lockutils [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.621386] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6581f5-4dff-4ef1-bb99-59509ff67a6a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.624720] env[69992]: INFO nova.compute.manager [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Terminating instance [ 1195.635090] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79dd5b9b-603b-453a-8ed6-d3aa726ef565 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.654268] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7c7f87-096a-4b30-aafa-dd952947f0ce {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.672933] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] The volume has not been displaced from its original location: [datastore2] volume-fbb68063-47ce-447c-a9bc-94fbbe5c17f4/volume-fbb68063-47ce-447c-a9bc-94fbbe5c17f4.vmdk. No consolidation needed. {{(pid=69992) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1195.678225] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Reconfiguring VM instance instance-0000002e to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1195.678567] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22e76fc1-20d8-42d4-9d18-574bc46cbe93 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.698524] env[69992]: DEBUG oslo_vmware.api [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Waiting for the task: (returnval){ [ 1195.698524] env[69992]: value = "task-2897523" [ 1195.698524] env[69992]: _type = "Task" [ 1195.698524] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.707548] env[69992]: DEBUG oslo_vmware.api [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Task: {'id': task-2897523, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.803872] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]527e014c-7b97-f318-c34c-a43e58da5fc4, 'name': SearchDatastore_Task, 'duration_secs': 0.029029} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.804201] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1195.804484] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1195.805076] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.805076] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1195.805076] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1195.805582] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-08c74edc-d598-4eaa-90be-4e2a98f747f4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.818545] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1195.818545] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1195.818545] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04ac6a0e-2aa3-445c-8506-723efc3d16f0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.821344] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1195.826408] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1195.826408] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52222d6a-5ff1-3be3-cfd6-f035d913adf6" [ 1195.826408] env[69992]: _type = "Task" [ 1195.826408] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.843438] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52222d6a-5ff1-3be3-cfd6-f035d913adf6, 'name': SearchDatastore_Task, 'duration_secs': 0.010927} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.844096] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8946386b-4b51-4ed9-8f41-4e03bab78bce tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "0e8163d9-6ff5-4f1e-af33-ccb42fa46750" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.176s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.846045] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b125e3d-5549-4883-a490-0dfe9c1d3624 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.855123] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1195.855123] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52b6fde4-972a-3885-7a2e-2af2751746a0" [ 1195.855123] env[69992]: _type = "Task" [ 1195.855123] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.865549] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b6fde4-972a-3885-7a2e-2af2751746a0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.917036] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1195.917495] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ca75698-244d-48ac-8cf8-d300bd0f9d65 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.929479] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1195.929479] env[69992]: value = "task-2897524" [ 1195.929479] env[69992]: _type = "Task" [ 1195.929479] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.938301] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897524, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.009324] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "a35dd590-b5ff-4878-8aa5-8797814d8779" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.065s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.130525] env[69992]: DEBUG nova.compute.manager [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1196.130525] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1196.130697] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3590db-9104-472b-9024-225b697aac5b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.141307] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1196.144728] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c8ce96e-a7b5-4edf-ac0b-0cd819754def {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.151423] env[69992]: DEBUG oslo_vmware.api [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1196.151423] env[69992]: value = "task-2897525" [ 1196.151423] env[69992]: _type = "Task" [ 1196.151423] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.160764] env[69992]: DEBUG oslo_vmware.api [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897525, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.210872] env[69992]: DEBUG oslo_vmware.api [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Task: {'id': task-2897523, 'name': ReconfigVM_Task, 'duration_secs': 0.285858} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.211166] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Reconfigured VM instance instance-0000002e to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1196.216170] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eeeb7a1b-c210-4de7-9992-fa1f045125bb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.239141] env[69992]: DEBUG oslo_vmware.api [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Waiting for the task: (returnval){ [ 1196.239141] env[69992]: value = "task-2897526" [ 1196.239141] env[69992]: _type = "Task" [ 1196.239141] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.249667] env[69992]: DEBUG oslo_vmware.api [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Task: {'id': task-2897526, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.368262] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b6fde4-972a-3885-7a2e-2af2751746a0, 'name': SearchDatastore_Task, 'duration_secs': 0.011447} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.369566] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1196.369732] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 033d667f-5511-4254-a7e2-f8a2a94178d1/033d667f-5511-4254-a7e2-f8a2a94178d1.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1196.370888] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3187c363-a33c-458c-8cd2-63642e89ae42 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.377030] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dba791cd-bca4-4c75-b127-19ecf61b361d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.383201] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b71825-7848-4db2-93c0-c67ba7e530ee {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.387777] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1196.387777] env[69992]: value = "task-2897527" [ 1196.387777] env[69992]: _type = "Task" [ 1196.387777] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.418785] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd27c49-e66c-40e3-a4ff-a1e7df9c4b77 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.424559] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897527, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.430267] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6657e65f-2a69-48bc-887d-a31c111320de {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.447736] env[69992]: DEBUG nova.compute.provider_tree [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1196.452257] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897524, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.661495] env[69992]: DEBUG oslo_vmware.api [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897525, 'name': PowerOffVM_Task, 'duration_secs': 0.20624} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.661847] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1196.661882] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1196.662140] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d7a75183-d1dd-4249-b54c-9a323663d8be {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.732580] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1196.732808] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1196.732944] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Deleting the datastore file [datastore2] 
dd31269e-716c-44cd-9fc3-ce227fe5b3b2 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1196.733223] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d26002d-0f96-440a-bc48-4ada90d97836 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.735379] env[69992]: DEBUG oslo_concurrency.lockutils [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "1d436762-964d-40d9-871e-ee33c3ba25b5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.735546] env[69992]: DEBUG oslo_concurrency.lockutils [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "1d436762-964d-40d9-871e-ee33c3ba25b5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.735727] env[69992]: DEBUG oslo_concurrency.lockutils [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "1d436762-964d-40d9-871e-ee33c3ba25b5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.735920] env[69992]: DEBUG oslo_concurrency.lockutils [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "1d436762-964d-40d9-871e-ee33c3ba25b5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.736119] env[69992]: DEBUG oslo_concurrency.lockutils [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "1d436762-964d-40d9-871e-ee33c3ba25b5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.740070] env[69992]: INFO nova.compute.manager [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Terminating instance [ 1196.747175] env[69992]: DEBUG oslo_vmware.api [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1196.747175] env[69992]: value = "task-2897529" [ 1196.747175] env[69992]: _type = "Task" [ 1196.747175] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.756617] env[69992]: DEBUG oslo_vmware.api [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Task: {'id': task-2897526, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.761971] env[69992]: DEBUG oslo_vmware.api [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897529, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.898304] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897527, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.950262] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897524, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.954266] env[69992]: DEBUG nova.scheduler.client.report [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1197.243676] env[69992]: DEBUG nova.compute.manager [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1197.244543] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1197.247114] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751cc2bf-39d2-404f-bc69-e112d342daa0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.274192] env[69992]: DEBUG oslo_vmware.api [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Task: {'id': task-2897526, 'name': ReconfigVM_Task, 'duration_secs': 0.811908} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.280488] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581910', 'volume_id': 'fbb68063-47ce-447c-a9bc-94fbbe5c17f4', 'name': 'volume-fbb68063-47ce-447c-a9bc-94fbbe5c17f4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'af07ebd0-5f12-49c3-a518-95be9a8d6c82', 'attached_at': '', 'detached_at': '', 'volume_id': 'fbb68063-47ce-447c-a9bc-94fbbe5c17f4', 'serial': 'fbb68063-47ce-447c-a9bc-94fbbe5c17f4'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1197.280717] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1197.281144] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1197.281433] env[69992]: DEBUG oslo_vmware.api [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897529, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.310643} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.282426] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5d1002-1f1c-4ddb-838b-69acb1c03859 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.285659] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3e8f57b8-3134-4338-83ad-6d7b6a934d14 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.287674] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1197.287887] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1197.288178] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1197.288361] env[69992]: INFO nova.compute.manager [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1197.288648] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1197.288972] env[69992]: DEBUG nova.compute.manager [-] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1197.289071] env[69992]: DEBUG nova.network.neutron [-] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1197.299058] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1197.300606] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f159e3c-3712-4118-8706-59d8f24a9d9f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.302722] env[69992]: DEBUG oslo_vmware.api [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1197.302722] env[69992]: value = "task-2897530" [ 1197.302722] env[69992]: _type = "Task" [ 1197.302722] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.316261] env[69992]: DEBUG oslo_vmware.api [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897530, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.385975] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1197.386386] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1197.386632] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Deleting the datastore file [datastore2] af07ebd0-5f12-49c3-a518-95be9a8d6c82 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1197.386967] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e60c4e94-6995-4895-90b5-4f36d146c1be {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.397833] env[69992]: DEBUG oslo_vmware.api [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Waiting for the task: (returnval){ [ 1197.397833] env[69992]: value = "task-2897532" [ 1197.397833] env[69992]: _type = "Task" [ 1197.397833] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.401688] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897527, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.99911} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.405340] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 033d667f-5511-4254-a7e2-f8a2a94178d1/033d667f-5511-4254-a7e2-f8a2a94178d1.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1197.405615] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1197.405956] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-91dea8ac-2644-4965-9534-f2349edc94fb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.414168] env[69992]: DEBUG oslo_vmware.api [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Task: {'id': task-2897532, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.414679] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1197.414679] env[69992]: value = "task-2897533" [ 1197.414679] env[69992]: _type = "Task" [ 1197.414679] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.423569] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897533, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.445169] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897524, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.460349] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.152s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.463872] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.242s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1197.463872] env[69992]: DEBUG nova.objects.instance [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lazy-loading 'resources' on Instance uuid a06d4b38-0e39-46ef-a588-7627661cb201 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1197.482597] env[69992]: INFO nova.scheduler.client.report [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Deleted allocations for instance c1c90aa6-922d-4315-8ead-2263a55a5d6e [ 1197.772945] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Acquiring lock "9591b360-414b-4aa9-94b2-5b9ccb9e7d39" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.773202] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Lock "9591b360-414b-4aa9-94b2-5b9ccb9e7d39" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1197.813242] env[69992]: DEBUG oslo_vmware.api [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897530, 'name': PowerOffVM_Task, 'duration_secs': 0.221606} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.813540] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1197.813711] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1197.813958] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-59c4c967-3187-4156-a082-26dff8a00690 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.885966] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1197.886098] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1197.886291] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Deleting the datastore file [datastore1] 1d436762-964d-40d9-871e-ee33c3ba25b5 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1197.886626] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ecf0223b-4520-4caa-934e-0a7c5bc29a70 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.893514] env[69992]: DEBUG oslo_vmware.api [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for the task: (returnval){ [ 1197.893514] env[69992]: value = "task-2897535" [ 1197.893514] env[69992]: _type = "Task" [ 1197.893514] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.904013] env[69992]: DEBUG oslo_vmware.api [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897535, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.926752] env[69992]: DEBUG oslo_vmware.api [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Task: {'id': task-2897532, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13576} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.926752] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1197.926752] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1197.926752] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1197.926752] env[69992]: INFO nova.compute.manager [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Took 2.85 seconds to destroy the instance on the hypervisor. [ 1197.926752] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1197.926752] env[69992]: DEBUG nova.compute.manager [-] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1197.926752] env[69992]: DEBUG nova.network.neutron [-] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1197.931401] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897533, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069639} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.932406] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1197.936041] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a05f9e-1f26-4300-9b0e-45af370d11bc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.960736] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] 033d667f-5511-4254-a7e2-f8a2a94178d1/033d667f-5511-4254-a7e2-f8a2a94178d1.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1197.962573] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-547c236d-77a7-42f4-8240-2cd9b9eba151 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.984292] env[69992]: DEBUG nova.compute.manager [req-a4edb49f-0170-4149-b466-83a302c79b2c req-abe8fc44-ab6e-470d-9618-8a4a3bb37b34 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Received event network-vif-deleted-d325d681-8643-43a2-93dd-d4687ad115f5 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1197.984739] env[69992]: INFO nova.compute.manager [req-a4edb49f-0170-4149-b466-83a302c79b2c req-abe8fc44-ab6e-470d-9618-8a4a3bb37b34 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Neutron deleted interface d325d681-8643-43a2-93dd-d4687ad115f5; detaching it from the instance and deleting it from the info cache [ 1197.985073] env[69992]: DEBUG nova.network.neutron [req-a4edb49f-0170-4149-b466-83a302c79b2c req-abe8fc44-ab6e-470d-9618-8a4a3bb37b34 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.997195] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897524, 'name': PowerOffVM_Task, 'duration_secs': 1.532318} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.997195] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e0858535-8f81-4e68-b0fc-d6e8b76b0ad6 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lock "c1c90aa6-922d-4315-8ead-2263a55a5d6e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.565s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.004695] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1198.004695] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a23dde42-4f66-43cd-8c25-dbacb3b426c1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.009194] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1198.009194] env[69992]: value = "task-2897536" [ 1198.009194] env[69992]: _type = "Task" [ 1198.009194] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.043050] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc940631-4999-491e-8598-de2b8ce23ea5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.044947] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897536, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.081508] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1198.081816] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d9020397-6530-44a2-b2c7-3f205239200d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.092619] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1198.092619] env[69992]: value = "task-2897537" [ 1198.092619] env[69992]: _type = "Task" [ 1198.092619] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.105834] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] VM already powered off {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1198.105974] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1198.106242] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1198.106364] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1198.106640] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1198.107044] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2efce496-573c-45b2-aeaf-8f815aca692d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.118133] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1198.118133] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1198.118757] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c7d440e-ba39-4460-903e-7e52d1fe2fcc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.127278] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1198.127278] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52558089-df3e-3661-73d5-88f4974b354d" [ 1198.127278] env[69992]: _type = "Task" [ 1198.127278] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.136336] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52558089-df3e-3661-73d5-88f4974b354d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.276772] env[69992]: DEBUG nova.compute.manager [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1198.340920] env[69992]: DEBUG nova.network.neutron [-] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.409182] env[69992]: DEBUG oslo_vmware.api [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Task: {'id': task-2897535, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.289951} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.412230] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1198.412230] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1198.412230] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1198.412230] env[69992]: INFO nova.compute.manager [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1198.412230] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1198.412230] env[69992]: DEBUG nova.compute.manager [-] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1198.412230] env[69992]: DEBUG nova.network.neutron [-] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1198.497680] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c3e1d8f1-dc8f-4e44-b537-1f9387867ff4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.512681] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3f0cb1-525c-4c9c-9b83-6741e2f4c586 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.572125] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897536, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.572125] env[69992]: DEBUG nova.compute.manager [req-a4edb49f-0170-4149-b466-83a302c79b2c req-abe8fc44-ab6e-470d-9618-8a4a3bb37b34 service nova] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Detach interface failed, port_id=d325d681-8643-43a2-93dd-d4687ad115f5, reason: Instance dd31269e-716c-44cd-9fc3-ce227fe5b3b2 could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1198.604136] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0e35aad-3d9b-41bc-8e02-0d73a30ae91a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.612240] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a09fcfbd-4427-4a57-b1cd-3134bfb6bb59 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.647700] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0fd72f5-a4a4-46ea-a30a-0cbe2b62eb91 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.655419] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52558089-df3e-3661-73d5-88f4974b354d, 'name': SearchDatastore_Task, 'duration_secs': 0.010757} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.657833] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59c684d7-1bca-42fc-b6c2-95105529a991 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.660795] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc9109e-ee34-4135-a97f-80f3bc4b25c7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.667286] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1198.667286] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52aadc9c-3b5d-0c06-1930-a52038c64f38" [ 1198.667286] env[69992]: _type = "Task" [ 1198.667286] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.675045] env[69992]: DEBUG nova.compute.provider_tree [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1198.683598] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52aadc9c-3b5d-0c06-1930-a52038c64f38, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.686903] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquiring lock "4e93b655-aaf4-49b8-bbb2-92287ec15bbc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1198.687136] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lock "4e93b655-aaf4-49b8-bbb2-92287ec15bbc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1198.687337] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquiring lock "4e93b655-aaf4-49b8-bbb2-92287ec15bbc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1198.687512] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lock "4e93b655-aaf4-49b8-bbb2-92287ec15bbc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1198.687673] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lock "4e93b655-aaf4-49b8-bbb2-92287ec15bbc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.693114] env[69992]: INFO nova.compute.manager [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Terminating instance [ 1198.797145] env[69992]: DEBUG oslo_concurrency.lockutils [None 
req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1198.846342] env[69992]: INFO nova.compute.manager [-] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Took 1.56 seconds to deallocate network for instance. [ 1199.036561] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897536, 'name': ReconfigVM_Task, 'duration_secs': 0.60087} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.036934] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Reconfigured VM instance instance-0000004a to attach disk [datastore2] 033d667f-5511-4254-a7e2-f8a2a94178d1/033d667f-5511-4254-a7e2-f8a2a94178d1.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1199.037732] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f4e71a4a-047e-4332-b5de-e6c0fc602759 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.044948] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1199.044948] env[69992]: value = "task-2897538" [ 1199.044948] env[69992]: _type = "Task" [ 1199.044948] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.054378] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897538, 'name': Rename_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.180510] env[69992]: DEBUG nova.scheduler.client.report [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1199.190707] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52aadc9c-3b5d-0c06-1930-a52038c64f38, 'name': SearchDatastore_Task, 'duration_secs': 0.032373} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.191237] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1199.192089] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 1b4da2ab-d026-45d8-8234-79ddd84d5cbb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk. 
{{(pid=69992) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1199.192407] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5d3556ac-92be-4e7c-afb0-f9f87a56e429 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.198532] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquiring lock "refresh_cache-4e93b655-aaf4-49b8-bbb2-92287ec15bbc" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.198711] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquired lock "refresh_cache-4e93b655-aaf4-49b8-bbb2-92287ec15bbc" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1199.198969] env[69992]: DEBUG nova.network.neutron [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1199.201760] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1199.201760] env[69992]: value = "task-2897539" [ 1199.201760] env[69992]: _type = "Task" [ 1199.201760] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.214422] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897539, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.354951] env[69992]: DEBUG oslo_concurrency.lockutils [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.360487] env[69992]: DEBUG nova.network.neutron [-] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.388580] env[69992]: DEBUG nova.network.neutron [-] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.558512] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897538, 'name': Rename_Task, 'duration_secs': 0.262326} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.560256] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1199.560593] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-354cbfcb-04ec-46eb-9c80-239eccdee434 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.572648] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1199.572648] env[69992]: value = "task-2897540" [ 1199.572648] env[69992]: _type = "Task" [ 1199.572648] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.579835] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897540, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.686673] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.224s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1199.689249] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.464s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1199.690925] env[69992]: INFO nova.compute.claims [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1199.710601] env[69992]: INFO nova.scheduler.client.report [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Deleted allocations for instance a06d4b38-0e39-46ef-a588-7627661cb201 [ 1199.726408] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897539, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.740315] env[69992]: DEBUG nova.network.neutron [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1199.844838] env[69992]: DEBUG nova.network.neutron [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.864814] env[69992]: INFO nova.compute.manager [-] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Took 1.45 seconds to deallocate network for instance. [ 1199.901426] env[69992]: INFO nova.compute.manager [-] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Took 1.97 seconds to deallocate network for instance. [ 1200.046444] env[69992]: DEBUG nova.compute.manager [req-d2905db3-f4af-4946-a7a3-2fa6e56d64fd req-ce1e7424-3c20-4425-8a9d-29574e85c919 service nova] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Received event network-vif-deleted-2584dc71-913f-4c9b-922c-f8b28530b82f {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1200.046619] env[69992]: DEBUG nova.compute.manager [req-d2905db3-f4af-4946-a7a3-2fa6e56d64fd req-ce1e7424-3c20-4425-8a9d-29574e85c919 service nova] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Received event network-vif-deleted-bd75002a-c4e8-4f29-99ff-b6f5055c068d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1200.083128] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897540, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.218603] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897539, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.886621} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.218913] env[69992]: INFO nova.virt.vmwareapi.ds_util [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 1b4da2ab-d026-45d8-8234-79ddd84d5cbb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk. 
[ 1200.219810] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e21afac-a607-413a-8dac-762bfeeb2701 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.226509] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2dd90d06-9e7a-4b65-860a-88386a0c8953 tempest-SecurityGroupsTestJSON-1379750779 tempest-SecurityGroupsTestJSON-1379750779-project-member] Lock "a06d4b38-0e39-46ef-a588-7627661cb201" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.447s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1200.254183] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] 1b4da2ab-d026-45d8-8234-79ddd84d5cbb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1200.255144] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9998b2f8-f5f3-4de1-99a5-3d853d41b83e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.277217] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1200.277217] env[69992]: value = "task-2897541" [ 1200.277217] env[69992]: _type = "Task" [ 1200.277217] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.287988] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897541, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.346107] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Releasing lock "refresh_cache-4e93b655-aaf4-49b8-bbb2-92287ec15bbc" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1200.346544] env[69992]: DEBUG nova.compute.manager [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1200.346773] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1200.347676] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d33123e3-3db1-43ce-8f50-7277449979e0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.354859] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1200.355106] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-57a8a4dd-b6af-4e9e-8ef0-4dbe51b5ecc2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.361415] env[69992]: DEBUG oslo_vmware.api [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1200.361415] env[69992]: value = "task-2897542" [ 1200.361415] env[69992]: _type = "Task" [ 1200.361415] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.368822] env[69992]: DEBUG oslo_vmware.api [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897542, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.371824] env[69992]: DEBUG oslo_concurrency.lockutils [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1200.461841] env[69992]: INFO nova.compute.manager [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Took 0.56 seconds to detach 1 volumes for instance. [ 1200.464157] env[69992]: DEBUG nova.compute.manager [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Deleting volume: fbb68063-47ce-447c-a9bc-94fbbe5c17f4 {{(pid=69992) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1200.584247] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897540, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.788185] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897541, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.872870] env[69992]: DEBUG oslo_vmware.api [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897542, 'name': PowerOffVM_Task, 'duration_secs': 0.230631} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.873167] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1200.873346] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1200.873584] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1d8cfbe6-9684-4426-a8ea-52254b02d4e3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.904925] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1200.905851] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1200.905851] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Deleting the datastore file [datastore2] 4e93b655-aaf4-49b8-bbb2-92287ec15bbc {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1200.905851] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73c24c0e-f5e5-4833-a04f-8db8e95cbab2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.920353] env[69992]: DEBUG oslo_vmware.api [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for the task: (returnval){ [ 1200.920353] env[69992]: value = "task-2897545" [ 1200.920353] env[69992]: _type = "Task" [ 1200.920353] env[69992]: } to 
complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.931264] env[69992]: DEBUG oslo_vmware.api [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897545, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.007828] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1201.086188] env[69992]: DEBUG oslo_vmware.api [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897540, 'name': PowerOnVM_Task, 'duration_secs': 1.102676} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.086460] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1201.086663] env[69992]: INFO nova.compute.manager [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Took 11.02 seconds to spawn the instance on the hypervisor. 
[ 1201.086840] env[69992]: DEBUG nova.compute.manager [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1201.087821] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece6aade-1878-461a-8c6e-f1a24b6cd206 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.117847] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3704f421-563c-44fb-ba2d-96c7dbb46ce7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.131442] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd18564-5fa5-4115-b18d-3b6e10471e20 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.169918] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7545db92-581a-4b63-b029-b6fe4bcb314f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.178970] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-207b884a-2ce8-41df-a078-ff1deebea881 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.196072] env[69992]: DEBUG nova.compute.provider_tree [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1201.287141] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897541, 'name': ReconfigVM_Task, 'duration_secs': 0.738002} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.287357] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Reconfigured VM instance instance-00000037 to attach disk [datastore2] 1b4da2ab-d026-45d8-8234-79ddd84d5cbb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb-rescue.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1201.288257] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02950ef3-6deb-41e5-bf52-591313133794 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.318935] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f98402b4-0c69-4bd0-ad0f-2b92e22f4210 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.338301] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1201.338301] env[69992]: value = "task-2897546" [ 1201.338301] env[69992]: _type = "Task" [ 1201.338301] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.347816] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897546, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.429745] env[69992]: DEBUG oslo_vmware.api [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Task: {'id': task-2897545, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.27133} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.429984] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1201.430293] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1201.430484] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1201.430681] env[69992]: INFO nova.compute.manager [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1201.430958] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1201.431248] env[69992]: DEBUG nova.compute.manager [-] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1201.431353] env[69992]: DEBUG nova.network.neutron [-] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1201.446810] env[69992]: DEBUG nova.network.neutron [-] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1201.609531] env[69992]: INFO nova.compute.manager [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Took 47.63 seconds to build instance. 
[ 1201.698941] env[69992]: DEBUG nova.scheduler.client.report [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1201.854000] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897546, 'name': ReconfigVM_Task, 'duration_secs': 0.491009} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.854000] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1201.854000] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3594aa98-0e1f-4fff-a126-9d0e3161a809 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.866728] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1201.866728] env[69992]: value = "task-2897547" [ 1201.866728] env[69992]: _type = "Task" [ 1201.866728] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.877840] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897547, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.949596] env[69992]: DEBUG nova.network.neutron [-] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.111984] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8c7a53b-6083-47f6-be78-b5b13fd037be tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "033d667f-5511-4254-a7e2-f8a2a94178d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.143s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1202.208022] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.516s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1202.208022] env[69992]: DEBUG nova.compute.manager [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1202.208900] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.596s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1202.213135] env[69992]: INFO nova.compute.claims [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1202.256665] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "a35dd590-b5ff-4878-8aa5-8797814d8779" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1202.256943] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "a35dd590-b5ff-4878-8aa5-8797814d8779" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1202.257352] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock 
"a35dd590-b5ff-4878-8aa5-8797814d8779-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1202.257352] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "a35dd590-b5ff-4878-8aa5-8797814d8779-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1202.257514] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "a35dd590-b5ff-4878-8aa5-8797814d8779-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1202.262880] env[69992]: INFO nova.compute.manager [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Terminating instance [ 1202.361757] env[69992]: DEBUG oslo_concurrency.lockutils [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "033d667f-5511-4254-a7e2-f8a2a94178d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1202.362016] env[69992]: DEBUG oslo_concurrency.lockutils [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "033d667f-5511-4254-a7e2-f8a2a94178d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1202.362294] env[69992]: DEBUG oslo_concurrency.lockutils [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "033d667f-5511-4254-a7e2-f8a2a94178d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1202.362480] env[69992]: DEBUG oslo_concurrency.lockutils [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "033d667f-5511-4254-a7e2-f8a2a94178d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1202.363442] env[69992]: DEBUG oslo_concurrency.lockutils [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "033d667f-5511-4254-a7e2-f8a2a94178d1-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1202.365327] env[69992]: INFO nova.compute.manager [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Terminating instance [ 1202.379692] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897547, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.452689] env[69992]: INFO nova.compute.manager [-] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Took 1.02 seconds to deallocate network for instance. [ 1202.715687] env[69992]: DEBUG nova.compute.utils [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1202.720355] env[69992]: DEBUG nova.compute.manager [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Not allocating networking since 'none' was specified. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1202.771294] env[69992]: DEBUG nova.compute.manager [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1202.771294] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1202.771294] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89973cdf-1d4c-4b7e-b1c8-ab86e847a39b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.783029] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1202.783029] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7c4c690-26ea-4345-b5fe-d3ebb6466574 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.789836] env[69992]: DEBUG oslo_vmware.api [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1202.789836] env[69992]: value = "task-2897548" [ 1202.789836] env[69992]: _type = "Task" [ 1202.789836] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.800374] env[69992]: DEBUG oslo_vmware.api [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897548, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.872673] env[69992]: DEBUG nova.compute.manager [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1202.872898] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1202.873808] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1e05ebf-6e48-4e47-8500-c7b05ecea478 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.886879] env[69992]: DEBUG oslo_vmware.api [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897547, 'name': PowerOnVM_Task, 'duration_secs': 0.801273} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.890273] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1202.892014] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1202.893102] env[69992]: DEBUG nova.compute.manager [None req-ce37afd9-b4b2-4a29-b1af-541979678529 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1202.893342] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-da56d6dd-98f4-4abf-8920-7320a1adb3e9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.895648] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7656b46-8790-4f21-bb09-4b4ec1f50368 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.907526] env[69992]: DEBUG oslo_vmware.api [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1202.907526] env[69992]: value = "task-2897549" [ 1202.907526] env[69992]: _type = "Task" [ 1202.907526] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.915523] env[69992]: DEBUG oslo_vmware.api [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897549, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.960175] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1203.226341] env[69992]: DEBUG nova.compute.manager [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1203.301168] env[69992]: DEBUG oslo_vmware.api [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897548, 'name': PowerOffVM_Task, 'duration_secs': 0.275565} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.301429] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1203.301601] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1203.301851] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aee08eb5-c42b-44ba-bbae-9f5babbee067 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.364240] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1203.364876] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1203.364876] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Deleting the datastore file [datastore2] a35dd590-b5ff-4878-8aa5-8797814d8779 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1203.365041] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5aa8424-5e39-4715-b736-7b21b5545689 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.372489] env[69992]: DEBUG oslo_vmware.api [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1203.372489] env[69992]: value = "task-2897551" [ 1203.372489] env[69992]: _type = "Task" [ 1203.372489] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.386621] env[69992]: DEBUG oslo_vmware.api [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897551, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.430916] env[69992]: DEBUG oslo_vmware.api [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897549, 'name': PowerOffVM_Task, 'duration_secs': 0.170102} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.432123] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1203.432456] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1203.432650] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2872062c-fea6-43a1-813b-2b43065da646 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.495971] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1203.495971] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1203.495971] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Deleting the datastore file [datastore2] 033d667f-5511-4254-a7e2-f8a2a94178d1 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1203.495971] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-25750a46-c71e-4fcb-b442-403586fef00a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.501259] env[69992]: DEBUG oslo_vmware.api [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for the task: (returnval){ [ 1203.501259] env[69992]: value = "task-2897553" [ 1203.501259] env[69992]: _type = "Task" 
[ 1203.501259] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.509535] env[69992]: DEBUG oslo_vmware.api [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897553, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.725858] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81db2b4f-4659-4a7c-a85d-21e98be8583f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.734444] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6344bec5-428f-4346-930c-d3b1902a47f5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.779377] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7dcd181-bbd9-4a1e-bd48-fbb90e9964c7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.787644] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b50988cd-ba04-43db-aca9-93ae4a8488c8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.802912] env[69992]: DEBUG nova.compute.provider_tree [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1203.888241] env[69992]: DEBUG oslo_vmware.api [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897551, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.25173} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.888492] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1203.888752] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1203.888888] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1203.890022] env[69992]: INFO nova.compute.manager [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1203.890022] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1203.890022] env[69992]: DEBUG nova.compute.manager [-] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1203.890022] env[69992]: DEBUG nova.network.neutron [-] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1204.011052] env[69992]: DEBUG oslo_vmware.api [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Task: {'id': task-2897553, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.273375} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.011410] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1204.011576] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1204.011662] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1204.011795] env[69992]: INFO nova.compute.manager [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1204.012066] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1204.012455] env[69992]: DEBUG nova.compute.manager [-] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1204.012455] env[69992]: DEBUG nova.network.neutron [-] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1204.125619] env[69992]: INFO nova.compute.manager [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Unrescuing [ 1204.126283] env[69992]: DEBUG oslo_concurrency.lockutils [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "refresh_cache-1b4da2ab-d026-45d8-8234-79ddd84d5cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.126931] env[69992]: DEBUG oslo_concurrency.lockutils [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquired lock "refresh_cache-1b4da2ab-d026-45d8-8234-79ddd84d5cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1204.126931] env[69992]: DEBUG nova.network.neutron [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1204.247018] env[69992]: DEBUG nova.compute.manager [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1204.279620] env[69992]: DEBUG nova.virt.hardware [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1204.279903] env[69992]: DEBUG nova.virt.hardware [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1204.280092] env[69992]: DEBUG nova.virt.hardware [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1204.280287] env[69992]: DEBUG nova.virt.hardware [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1204.280481] env[69992]: DEBUG nova.virt.hardware [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1204.280577] env[69992]: DEBUG nova.virt.hardware [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1204.280777] env[69992]: DEBUG nova.virt.hardware [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1204.280931] env[69992]: DEBUG nova.virt.hardware [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1204.281099] env[69992]: DEBUG nova.virt.hardware [None 
req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1204.281269] env[69992]: DEBUG nova.virt.hardware [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1204.281443] env[69992]: DEBUG nova.virt.hardware [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1204.282308] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3775c09e-bdc5-4a6a-a4b7-467cde3adeed {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.290938] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd7166dd-1c47-4994-bb36-59cf3cbd6e25 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.308115] env[69992]: DEBUG nova.scheduler.client.report [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1204.311412] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Instance VIF info [] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1204.318870] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Creating folder: Project (3c5cbfa6303f4825bd7fe1b9ff6a80d6). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1204.321830] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d204a472-3be2-4185-9e7e-470b3180271a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.334803] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Created folder: Project (3c5cbfa6303f4825bd7fe1b9ff6a80d6) in parent group-v581821. 
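The inventory payload logged above for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 is what bounds scheduling on this node. A small illustrative calculation follows, using the standard placement capacity formula (total - reserved) * allocation_ratio with the values copied from that entry; the snippet is for illustration only.

# Capacity the placement service derives from the inventory logged above:
# capacity = (total - reserved) * allocation_ratio per resource class.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} allocatable")
# -> VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400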
[ 1204.334995] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Creating folder: Instances. Parent ref: group-v582036. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1204.335290] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3d57922c-1fed-47fa-8360-9c7a5c25093d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.346085] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Created folder: Instances in parent group-v582036. [ 1204.346085] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1204.346614] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1204.347744] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-25ec915c-4011-44b4-844b-f39926eb42d8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.372572] env[69992]: DEBUG nova.compute.manager [req-142b247b-2fd5-40d4-ba79-43c16047e521 req-9dda3e84-8cf8-4a0f-a4a1-9d516e6dbed7 service nova] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Received event network-vif-deleted-7feb60bd-3eed-4a64-b356-4d949eb60a7e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1204.372572] env[69992]: INFO nova.compute.manager [req-142b247b-2fd5-40d4-ba79-43c16047e521 req-9dda3e84-8cf8-4a0f-a4a1-9d516e6dbed7 service nova] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Neutron deleted interface 7feb60bd-3eed-4a64-b356-4d949eb60a7e; detaching it from the instance and deleting it from the info cache [ 1204.374194] env[69992]: DEBUG nova.network.neutron [req-142b247b-2fd5-40d4-ba79-43c16047e521 req-9dda3e84-8cf8-4a0f-a4a1-9d516e6dbed7 service nova] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.379691] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1204.379691] env[69992]: value = "task-2897556" [ 1204.379691] env[69992]: _type = "Task" [ 1204.379691] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.390700] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897556, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.821212] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.612s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1204.821901] env[69992]: DEBUG nova.compute.manager [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1204.826321] env[69992]: DEBUG oslo_concurrency.lockutils [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.033s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1204.826321] env[69992]: DEBUG nova.objects.instance [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Lazy-loading 'resources' on Instance uuid 37751af7-267e-4693-aaa3-cd1bb9c3d950 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1204.869907] env[69992]: DEBUG nova.network.neutron [-] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.874687] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-78771315-d01a-45fa-be66-8d7270c92cdd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.889379] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c97b0ad-d2bb-4d91-99ce-277f598afa1a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.907239] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897556, 'name': CreateVM_Task, 'duration_secs': 0.306947} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.907239] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1204.907239] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.907239] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1204.907239] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1204.907239] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fabebb65-0d08-47d0-b2fb-ce67f806080a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.911577] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Waiting for the task: (returnval){ [ 1204.911577] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d15699-6797-acbb-4855-d5862b5c3c18" [ 1204.911577] env[69992]: _type = "Task" [ 1204.911577] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.926779] env[69992]: DEBUG nova.compute.manager [req-142b247b-2fd5-40d4-ba79-43c16047e521 req-9dda3e84-8cf8-4a0f-a4a1-9d516e6dbed7 service nova] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Detach interface failed, port_id=7feb60bd-3eed-4a64-b356-4d949eb60a7e, reason: Instance a35dd590-b5ff-4878-8aa5-8797814d8779 could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1204.932157] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d15699-6797-acbb-4855-d5862b5c3c18, 'name': SearchDatastore_Task, 'duration_secs': 0.009943} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.932500] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1204.932743] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1204.932977] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.933129] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1204.933303] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1204.933562] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4476ea65-e7ee-490e-a650-2f209abaa2ca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.941833] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1204.941952] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1204.943030] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2af413e1-139d-4a14-b7f6-467f47d7361b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.948335] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Waiting for the task: (returnval){ [ 1204.948335] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5232489a-6fcb-69f8-fc1b-2caed2bc0074" [ 1204.948335] env[69992]: _type = "Task" [ 1204.948335] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.957171] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5232489a-6fcb-69f8-fc1b-2caed2bc0074, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.006747] env[69992]: DEBUG nova.network.neutron [-] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.235706] env[69992]: DEBUG nova.network.neutron [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Updating instance_info_cache with network_info: [{"id": "789f6123-167b-48dd-ae68-cfdbc1d5c78a", "address": "fa:16:3e:ed:f2:3c", "network": {"id": "e710c9b1-06e9-45f1-88fe-251001c4f54f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1116812669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8217315011854468b0cc17c4dfe342f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap789f6123-16", "ovs_interfaceid": "789f6123-167b-48dd-ae68-cfdbc1d5c78a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.329701] env[69992]: DEBUG nova.compute.utils [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:239}} [ 1205.335352] env[69992]: DEBUG nova.compute.manager [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Not allocating networking since 'none' was specified. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1205.372981] env[69992]: INFO nova.compute.manager [-] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Took 1.48 seconds to deallocate network for instance. [ 1205.462689] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5232489a-6fcb-69f8-fc1b-2caed2bc0074, 'name': SearchDatastore_Task, 'duration_secs': 0.015254} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.463530] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95f4d91b-6f3f-4abc-b060-6b0df01c535a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.468975] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Waiting for the task: (returnval){ [ 1205.468975] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]525a2ad7-56d5-375c-039b-ef29d93a2dd6" [ 1205.468975] env[69992]: _type = "Task" [ 1205.468975] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.476639] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525a2ad7-56d5-375c-039b-ef29d93a2dd6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.511720] env[69992]: INFO nova.compute.manager [-] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Took 1.50 seconds to deallocate network for instance. 
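The "Acquiring lock ... acquired by ... released by" lines and the "Acquired lock"/"Releasing lock refresh_cache-..." lines throughout this trace are emitted by oslo.concurrency's lockutils: the former by the synchronized decorator wrapper, the latter by the lock() context manager. A rough sketch of both forms, with placeholder lock names and a placeholder function rather than Nova's real ones:

# Rough sketch of the oslo.concurrency locking that produces the
# "Acquiring lock"/"acquired"/"released" DEBUG lines with wait/held times.
# Lock names and the decorated function are placeholders.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Runs only while the named internal semaphore is held; the decorator
    # wrapper logs the 'acquired by "..."' / 'released by "..."' lines.
    pass

# Context-manager form, as used for ad-hoc critical sections such as the
# per-instance "-events" and "refresh_cache-<uuid>" locks in the log.
with lockutils.lock('refresh_cache-1b4da2ab-d026-45d8-8234-79ddd84d5cbb'):
    pass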
[ 1205.743329] env[69992]: DEBUG oslo_concurrency.lockutils [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Releasing lock "refresh_cache-1b4da2ab-d026-45d8-8234-79ddd84d5cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1205.744097] env[69992]: DEBUG nova.objects.instance [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lazy-loading 'flavor' on Instance uuid 1b4da2ab-d026-45d8-8234-79ddd84d5cbb {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1205.810488] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d295fa-78b6-4299-8a01-9b155a505648 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.820319] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d14f1a-1c07-4778-b2ca-a7a3586fafea {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.850860] env[69992]: DEBUG nova.compute.manager [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1205.856069] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e05f6666-0ebe-4f20-84b9-0902eaba5225 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.862465] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adcc4608-9a0f-4838-ac66-2310abb99481 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.877738] env[69992]: DEBUG nova.compute.provider_tree [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1205.883169] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1205.982018] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525a2ad7-56d5-375c-039b-ef29d93a2dd6, 'name': SearchDatastore_Task, 'duration_secs': 0.034564} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.982018] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1205.982018] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 451a8af1-a4a2-4c2d-932c-58955491433b/451a8af1-a4a2-4c2d-932c-58955491433b.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1205.982018] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d4b6ebb-8294-4137-935c-3166cae0c449 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.990161] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Waiting for the task: (returnval){ [ 1205.990161] env[69992]: value = "task-2897557" [ 1205.990161] env[69992]: _type = "Task" [ 1205.990161] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.999372] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897557, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.022120] env[69992]: DEBUG oslo_concurrency.lockutils [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1206.253553] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a413af18-bcf7-41eb-a0e9-df4a8f82b722 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.279982] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1206.280335] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62c0e6c5-2937-44fd-8d7a-3f0988330d67 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.288132] env[69992]: DEBUG oslo_vmware.api [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1206.288132] env[69992]: value = "task-2897558" [ 1206.288132] env[69992]: _type = "Task" [ 1206.288132] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.297506] env[69992]: DEBUG oslo_vmware.api [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897558, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.381844] env[69992]: DEBUG nova.scheduler.client.report [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1206.458260] env[69992]: DEBUG nova.compute.manager [req-d7f726b6-0161-4460-8fa7-d0fabd95ab67 req-aff8b507-ec2d-4b63-a35f-3064915afa53 service nova] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Received event network-vif-deleted-236e1657-89f3-43f3-9baf-d126ebdaac2e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1206.503214] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897557, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.800440] env[69992]: DEBUG oslo_vmware.api [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897558, 'name': PowerOffVM_Task, 'duration_secs': 0.504535} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.800924] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1206.807176] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Reconfiguring VM instance instance-00000037 to detach disk 2002 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1206.807176] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6e9efa3-41dd-4c58-b2f2-3f515cf466ac {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.827504] env[69992]: DEBUG oslo_vmware.api [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1206.827504] env[69992]: value = "task-2897559" [ 1206.827504] env[69992]: _type = "Task" [ 1206.827504] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.836679] env[69992]: DEBUG oslo_vmware.api [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897559, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.864956] env[69992]: DEBUG oslo_concurrency.lockutils [None req-20e300b7-0524-46a8-ba52-1c306b899d21 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "interface-5f98a2aa-eb7b-41d2-9e9f-14cee9445942-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1206.865352] env[69992]: DEBUG oslo_concurrency.lockutils [None req-20e300b7-0524-46a8-ba52-1c306b899d21 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "interface-5f98a2aa-eb7b-41d2-9e9f-14cee9445942-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1206.865579] env[69992]: DEBUG nova.objects.instance [None req-20e300b7-0524-46a8-ba52-1c306b899d21 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lazy-loading 'flavor' on Instance uuid 5f98a2aa-eb7b-41d2-9e9f-14cee9445942 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1206.870358] env[69992]: DEBUG nova.compute.manager [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1206.890258] env[69992]: DEBUG oslo_concurrency.lockutils [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.066s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1206.893362] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0e8c4f98-0fd1-411a-94c6-ceac9f823a69 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 23.399s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1206.914502] env[69992]: DEBUG nova.virt.hardware [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1206.915271] env[69992]: DEBUG nova.virt.hardware [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1206.915734] env[69992]: DEBUG nova.virt.hardware [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1206.916879] env[69992]: DEBUG nova.virt.hardware [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1206.917522] env[69992]: DEBUG nova.virt.hardware [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1206.918016] env[69992]: DEBUG nova.virt.hardware [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1206.922024] env[69992]: DEBUG nova.virt.hardware [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1206.922024] env[69992]: DEBUG nova.virt.hardware [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1206.922024] env[69992]: DEBUG nova.virt.hardware [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1206.922024] env[69992]: DEBUG nova.virt.hardware [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1206.922024] env[69992]: DEBUG nova.virt.hardware [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1206.922024] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8534c3e-2736-4836-bfe3-6392f5533d1a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.928083] env[69992]: INFO nova.scheduler.client.report [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Deleted allocations for instance 37751af7-267e-4693-aaa3-cd1bb9c3d950 [ 1206.946061] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d40a0da9-c61e-4297-852f-a732ff5eba92 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.964811] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Instance VIF info [] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1206.970841] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Creating folder: Project (4590766b58d0444a9f7bea19a4d07cb8). Parent ref: group-v581821. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1206.973686] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-53e2e69b-6bd6-45a7-af6d-414b918d296a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.982392] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Created folder: Project (4590766b58d0444a9f7bea19a4d07cb8) in parent group-v581821. [ 1206.982777] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Creating folder: Instances. Parent ref: group-v582039. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1206.984018] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-694e247e-2a99-42f2-b504-c55ec593191c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.999022] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Created folder: Instances in parent group-v582039. [ 1206.999022] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1206.999022] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1206.999521] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3dfb0409-282a-44df-a514-2e0dd52ced1c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.015711] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897557, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.704061} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.016469] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 451a8af1-a4a2-4c2d-932c-58955491433b/451a8af1-a4a2-4c2d-932c-58955491433b.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1207.017014] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1207.017242] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b9eac903-9f70-4aba-acac-435c140d30ed {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.022603] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1207.022603] env[69992]: value = "task-2897562" [ 1207.022603] env[69992]: _type = "Task" [ 1207.022603] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.024505] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Waiting for the task: (returnval){ [ 1207.024505] env[69992]: value = "task-2897563" [ 1207.024505] env[69992]: _type = "Task" [ 1207.024505] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.032111] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897562, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.035204] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897563, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.338419] env[69992]: DEBUG oslo_vmware.api [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897559, 'name': ReconfigVM_Task, 'duration_secs': 0.429862} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.338419] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Reconfigured VM instance instance-00000037 to detach disk 2002 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1207.338419] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1207.338419] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9cfa2ec5-5684-4126-9448-32d12454b4cf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.349455] env[69992]: DEBUG oslo_vmware.api [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1207.349455] env[69992]: value = "task-2897564" [ 1207.349455] env[69992]: _type = "Task" [ 1207.349455] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.359568] env[69992]: DEBUG oslo_vmware.api [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897564, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.373335] env[69992]: DEBUG nova.objects.instance [None req-20e300b7-0524-46a8-ba52-1c306b899d21 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lazy-loading 'pci_requests' on Instance uuid 5f98a2aa-eb7b-41d2-9e9f-14cee9445942 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1207.441964] env[69992]: DEBUG oslo_concurrency.lockutils [None req-125fef09-aade-473a-aae8-70ebe333dc11 tempest-ServerRescueTestJSONUnderV235-1436920537 tempest-ServerRescueTestJSONUnderV235-1436920537-project-member] Lock "37751af7-267e-4693-aaa3-cd1bb9c3d950" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.151s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1207.539518] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897562, 'name': CreateVM_Task, 'duration_secs': 0.266174} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.539704] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1207.543082] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.543279] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1207.543608] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1207.543891] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897563, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066097} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.544118] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af7a0ebf-543e-4188-b147-1b30ecbffd98 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.546643] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1207.547517] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-951dce5d-fc3c-43ad-ad2c-57bce9afac36 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.556596] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for the task: (returnval){ [ 1207.556596] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52ba8304-637a-69bc-6ec0-eb48711e1162" [ 1207.556596] env[69992]: _type = "Task" [ 1207.556596] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.574077] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 451a8af1-a4a2-4c2d-932c-58955491433b/451a8af1-a4a2-4c2d-932c-58955491433b.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1207.582256] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0368aafd-3d7b-47cc-8f31-afd94ae0aabd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.604294] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ba8304-637a-69bc-6ec0-eb48711e1162, 'name': SearchDatastore_Task, 'duration_secs': 0.043384} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.604764] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1207.605019] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1207.605259] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.605400] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1207.605572] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1207.605961] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 
tempest-ServersListShow296Test-370241449-project-member] Waiting for the task: (returnval){ [ 1207.605961] env[69992]: value = "task-2897565" [ 1207.605961] env[69992]: _type = "Task" [ 1207.605961] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.606120] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8a6a6f5-e1a9-4ecd-b76f-0a5f973174da {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.619012] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897565, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.623098] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1207.623754] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1207.624325] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fa54c13-4fe7-422f-b5d5-710a5dc24f30 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.629979] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for the task: (returnval){ [ 1207.629979] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5283bc7c-cbcc-84e9-293d-51fec0c08c87" [ 1207.629979] env[69992]: _type = "Task" [ 1207.629979] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.643380] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5283bc7c-cbcc-84e9-293d-51fec0c08c87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.864128] env[69992]: DEBUG oslo_vmware.api [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897564, 'name': PowerOnVM_Task, 'duration_secs': 0.449224} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.864426] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1207.864671] env[69992]: DEBUG nova.compute.manager [None req-91d57f3b-b4d5-41f6-b1ee-333e2f4de760 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1207.865564] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac285ea4-dbce-433a-b173-c3998efa8327 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.877257] env[69992]: DEBUG nova.objects.base [None req-20e300b7-0524-46a8-ba52-1c306b899d21 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Object Instance<5f98a2aa-eb7b-41d2-9e9f-14cee9445942> lazy-loaded attributes: flavor,pci_requests {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1207.877257] env[69992]: DEBUG nova.network.neutron [None req-20e300b7-0524-46a8-ba52-1c306b899d21 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1207.889466] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb80353-65d9-40b4-9593-a0cb784af2f0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.897154] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c701373-7ef2-4ad1-af85-e890ecdd1989 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.933996] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c8c1f3-b401-456d-9596-3c08eba4a968 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.943048] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7b4470e-9579-4e64-9e10-d88c7d6ca708 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.961277] env[69992]: DEBUG nova.compute.provider_tree [None req-0e8c4f98-0fd1-411a-94c6-ceac9f823a69 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1208.048162] env[69992]: DEBUG oslo_concurrency.lockutils [None req-20e300b7-0524-46a8-ba52-1c306b899d21 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock 
"interface-5f98a2aa-eb7b-41d2-9e9f-14cee9445942-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.183s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.055356] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Acquiring lock "7fa33d98-20b7-4162-a354-24cfea17701f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1208.056565] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Lock "7fa33d98-20b7-4162-a354-24cfea17701f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1208.117755] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897565, 'name': ReconfigVM_Task, 'duration_secs': 0.405544} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.119034] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 451a8af1-a4a2-4c2d-932c-58955491433b/451a8af1-a4a2-4c2d-932c-58955491433b.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1208.119034] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e9a29891-df77-4053-a132-bcc7453736cc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.125364] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Waiting for the task: (returnval){ [ 1208.125364] env[69992]: value = "task-2897566" [ 1208.125364] env[69992]: _type = "Task" [ 1208.125364] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.136359] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897566, 'name': Rename_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.149390] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5283bc7c-cbcc-84e9-293d-51fec0c08c87, 'name': SearchDatastore_Task, 'duration_secs': 0.018595} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.152121] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9188097c-3951-4c14-a892-5cee814bc215 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.163193] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for the task: (returnval){ [ 1208.163193] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52b6e0c2-cb22-7bf8-9c2e-cbf998abf823" [ 1208.163193] env[69992]: _type = "Task" [ 1208.163193] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.176426] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b6e0c2-cb22-7bf8-9c2e-cbf998abf823, 'name': SearchDatastore_Task, 'duration_secs': 0.014049} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.176768] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1208.177318] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190/a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1208.177658] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d738ac94-1638-47b7-ab3e-61722822dd8c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.184899] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for the task: (returnval){ [ 1208.184899] env[69992]: value = "task-2897567" [ 1208.184899] env[69992]: _type = "Task" [ 1208.184899] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.196469] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897567, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.464177] env[69992]: DEBUG nova.scheduler.client.report [None req-0e8c4f98-0fd1-411a-94c6-ceac9f823a69 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1208.561563] env[69992]: DEBUG nova.compute.manager [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1208.637223] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897566, 'name': Rename_Task, 'duration_secs': 0.182847} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.638349] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1208.639125] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1491c288-fd82-490b-92d3-a10e305630d9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.648021] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Waiting for the task: (returnval){ [ 1208.648021] env[69992]: value = "task-2897568" [ 1208.648021] env[69992]: _type = "Task" [ 1208.648021] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.659731] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897568, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.695890] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897567, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.095432] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.162021] env[69992]: DEBUG oslo_vmware.api [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897568, 'name': PowerOnVM_Task, 'duration_secs': 0.499879} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.162021] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1209.162021] env[69992]: INFO nova.compute.manager [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Took 4.92 seconds to spawn the instance on the hypervisor. [ 1209.162021] env[69992]: DEBUG nova.compute.manager [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1209.163172] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c1555ac-ffce-4ef3-9359-a2b6b73a56eb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.196955] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897567, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.942267} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.196955] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190/a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1209.197255] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1209.197886] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bb7cbc22-6b79-417d-8e6c-6c6ed0ec8ee0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.204538] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for the task: (returnval){ [ 1209.204538] env[69992]: value = "task-2897569" [ 1209.204538] env[69992]: _type = "Task" [ 1209.204538] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.214132] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897569, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.481595] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0e8c4f98-0fd1-411a-94c6-ceac9f823a69 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.588s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.484390] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.563s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.484541] env[69992]: DEBUG nova.objects.instance [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lazy-loading 'resources' on Instance uuid a49b4721-e338-4e60-b91e-137caa3c9c03 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1209.683459] env[69992]: INFO nova.compute.manager [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Took 33.49 seconds to build instance. [ 1209.717750] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897569, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.123566} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.718575] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1209.719072] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2250f2e4-331e-4335-a23c-2bbe07e61a25 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.742159] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190/a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1209.742746] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3383f12b-abe0-425f-9cdb-65c0818aa3aa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.764208] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for the task: (returnval){ [ 1209.764208] env[69992]: value = "task-2897570" [ 1209.764208] env[69992]: _type = "Task" [ 1209.764208] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.773206] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897570, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.810581] env[69992]: DEBUG oslo_concurrency.lockutils [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Acquiring lock "7fc7c481-75e8-40f2-a971-752ce6dde59b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.810878] env[69992]: DEBUG oslo_concurrency.lockutils [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Lock "7fc7c481-75e8-40f2-a971-752ce6dde59b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.811229] env[69992]: DEBUG oslo_concurrency.lockutils [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Acquiring lock "7fc7c481-75e8-40f2-a971-752ce6dde59b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.811398] env[69992]: DEBUG oslo_concurrency.lockutils [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Lock "7fc7c481-75e8-40f2-a971-752ce6dde59b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.811593] env[69992]: DEBUG oslo_concurrency.lockutils [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Lock "7fc7c481-75e8-40f2-a971-752ce6dde59b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.813594] env[69992]: INFO nova.compute.manager [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Terminating instance [ 1210.066304] env[69992]: INFO nova.scheduler.client.report [None req-0e8c4f98-0fd1-411a-94c6-ceac9f823a69 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Deleted allocation for migration a5252a08-401e-4a46-9c0d-2521390462d4 [ 1210.185567] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4901de6-4a63-4e08-b08a-2c98e2ae6c08 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Lock "451a8af1-a4a2-4c2d-932c-58955491433b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.002s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1210.277528] env[69992]: DEBUG oslo_vmware.api [None 
req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897570, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.319156] env[69992]: DEBUG nova.compute.manager [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1210.319156] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1210.319517] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b9732d-a505-4cad-aae6-ac708792f595 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.331327] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1210.333055] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b818219d-1dd1-4b93-9ceb-0b562850711d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.338849] env[69992]: DEBUG oslo_vmware.api [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Waiting for the task: (returnval){ [ 1210.338849] env[69992]: value = "task-2897571" [ 1210.338849] env[69992]: _type = "Task" [ 1210.338849] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.350905] env[69992]: DEBUG oslo_vmware.api [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Task: {'id': task-2897571, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.421875] env[69992]: DEBUG nova.compute.manager [req-d359d100-6dc3-4a0a-be77-e6f70b13e5c5 req-2377877e-fa82-4954-ae9f-a590b6b04bbe service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Received event network-changed-789f6123-167b-48dd-ae68-cfdbc1d5c78a {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1210.422233] env[69992]: DEBUG nova.compute.manager [req-d359d100-6dc3-4a0a-be77-e6f70b13e5c5 req-2377877e-fa82-4954-ae9f-a590b6b04bbe service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Refreshing instance network info cache due to event network-changed-789f6123-167b-48dd-ae68-cfdbc1d5c78a. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1210.422561] env[69992]: DEBUG oslo_concurrency.lockutils [req-d359d100-6dc3-4a0a-be77-e6f70b13e5c5 req-2377877e-fa82-4954-ae9f-a590b6b04bbe service nova] Acquiring lock "refresh_cache-1b4da2ab-d026-45d8-8234-79ddd84d5cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.422748] env[69992]: DEBUG oslo_concurrency.lockutils [req-d359d100-6dc3-4a0a-be77-e6f70b13e5c5 req-2377877e-fa82-4954-ae9f-a590b6b04bbe service nova] Acquired lock "refresh_cache-1b4da2ab-d026-45d8-8234-79ddd84d5cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1210.422966] env[69992]: DEBUG nova.network.neutron [req-d359d100-6dc3-4a0a-be77-e6f70b13e5c5 req-2377877e-fa82-4954-ae9f-a590b6b04bbe service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Refreshing network info cache for port 789f6123-167b-48dd-ae68-cfdbc1d5c78a {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1210.486467] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2550b71-64a3-4904-a136-e8d4920019a8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.495525] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ec46cf-6dc0-4299-8e10-3fde434cce75 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.527129] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "interface-5f98a2aa-eb7b-41d2-9e9f-14cee9445942-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1210.527483] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "interface-5f98a2aa-eb7b-41d2-9e9f-14cee9445942-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1210.527871] env[69992]: DEBUG nova.objects.instance [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lazy-loading 'flavor' on Instance uuid 5f98a2aa-eb7b-41d2-9e9f-14cee9445942 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1210.530209] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df42205-faf3-4a7e-a7ba-3fce0a3e113f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.539022] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-234db742-0893-473b-94a6-a30b0b545c51 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.554834] env[69992]: DEBUG 
nova.compute.provider_tree [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1210.573227] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0e8c4f98-0fd1-411a-94c6-ceac9f823a69 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "9df7b187-e579-41b0-9d24-be2a1ae93079" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 30.439s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1210.651885] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquiring lock "57702674-4c96-4577-a93f-24ecffebb3a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1210.652134] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "57702674-4c96-4577-a93f-24ecffebb3a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1210.777868] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897570, 'name': ReconfigVM_Task, 'duration_secs': 0.718862} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.777868] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Reconfigured VM instance instance-0000004c to attach disk [datastore1] a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190/a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1210.778178] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-65104bad-8a96-4847-b844-782736c4d229 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.785490] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for the task: (returnval){ [ 1210.785490] env[69992]: value = "task-2897572" [ 1210.785490] env[69992]: _type = "Task" [ 1210.785490] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.793072] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897572, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.848261] env[69992]: DEBUG oslo_vmware.api [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Task: {'id': task-2897571, 'name': PowerOffVM_Task, 'duration_secs': 0.220312} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.849032] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1210.849032] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1210.849032] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1538a41f-6378-4410-945a-4cdea04ce585 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.075146] env[69992]: ERROR nova.scheduler.client.report [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [req-f40e557d-4c54-4614-8851-349e1b658cb1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f40e557d-4c54-4614-8851-349e1b658cb1"}]} [ 1211.095702] env[69992]: DEBUG nova.scheduler.client.report [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Refreshing inventories for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1211.111816] env[69992]: DEBUG nova.scheduler.client.report [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Updating ProviderTree inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1211.112053] env[69992]: DEBUG nova.compute.provider_tree [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1211.126984] env[69992]: DEBUG nova.scheduler.client.report [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Refreshing aggregate associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, aggregates: None {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1211.153728] env[69992]: DEBUG nova.scheduler.client.report [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Refreshing trait associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1211.157817] env[69992]: DEBUG nova.compute.manager [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1211.220662] env[69992]: DEBUG nova.objects.instance [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lazy-loading 'pci_requests' on Instance uuid 5f98a2aa-eb7b-41d2-9e9f-14cee9445942 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1211.264885] env[69992]: DEBUG nova.network.neutron [req-d359d100-6dc3-4a0a-be77-e6f70b13e5c5 req-2377877e-fa82-4954-ae9f-a590b6b04bbe service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Updated VIF entry in instance network info cache for port 789f6123-167b-48dd-ae68-cfdbc1d5c78a. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1211.265463] env[69992]: DEBUG nova.network.neutron [req-d359d100-6dc3-4a0a-be77-e6f70b13e5c5 req-2377877e-fa82-4954-ae9f-a590b6b04bbe service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Updating instance_info_cache with network_info: [{"id": "789f6123-167b-48dd-ae68-cfdbc1d5c78a", "address": "fa:16:3e:ed:f2:3c", "network": {"id": "e710c9b1-06e9-45f1-88fe-251001c4f54f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1116812669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8217315011854468b0cc17c4dfe342f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap789f6123-16", "ovs_interfaceid": "789f6123-167b-48dd-ae68-cfdbc1d5c78a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.294657] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897572, 'name': Rename_Task, 'duration_secs': 0.397485} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.297062] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1211.297465] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84147e4e-97b3-467d-b22a-d0a7f4dae0c1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.304984] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for the task: (returnval){ [ 1211.304984] env[69992]: value = "task-2897574" [ 1211.304984] env[69992]: _type = "Task" [ 1211.304984] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.314950] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897574, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.577529] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9efdb627-7be4-4e0d-88b0-cb5bced74e70 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.585352] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4101385a-f985-424f-a3d4-8792f13db5c1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.621328] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-178435cd-2ec0-46f6-b267-684d486a5e0d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.625347] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1211.625347] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1211.625347] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Deleting the datastore file [datastore1] 7fc7c481-75e8-40f2-a971-752ce6dde59b {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1211.625347] env[69992]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aca8543c-8e28-430f-98e6-e9677bb624fe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.632870] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44938783-d9ab-41d9-9bd6-28275acd7aa4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.636596] env[69992]: DEBUG oslo_vmware.api [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Waiting for the task: (returnval){ [ 1211.636596] env[69992]: value = "task-2897575" [ 1211.636596] env[69992]: _type = "Task" [ 1211.636596] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.648103] env[69992]: DEBUG nova.compute.provider_tree [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1211.654493] env[69992]: DEBUG oslo_vmware.api [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Task: {'id': task-2897575, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.678169] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1211.723085] env[69992]: DEBUG nova.objects.base [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Object Instance<5f98a2aa-eb7b-41d2-9e9f-14cee9445942> lazy-loaded attributes: flavor,pci_requests {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1211.723301] env[69992]: DEBUG nova.network.neutron [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1211.765993] env[69992]: DEBUG nova.policy [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd0f7a6e9a76342a1a4fd39a8b21a31d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dc6fa4e45f4c47c49d67e6efe2eb7a50', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1211.770021] env[69992]: DEBUG oslo_concurrency.lockutils [req-d359d100-6dc3-4a0a-be77-e6f70b13e5c5 req-2377877e-fa82-4954-ae9f-a590b6b04bbe service nova] Releasing lock "refresh_cache-1b4da2ab-d026-45d8-8234-79ddd84d5cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1211.816233] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897574, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.035789] env[69992]: DEBUG nova.network.neutron [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Successfully created port: 4ead8f7e-9ac9-474b-9302-a618d1bf1988 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1212.148739] env[69992]: DEBUG oslo_vmware.api [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Task: {'id': task-2897575, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168509} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.149343] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1212.149650] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1212.149933] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1212.150357] env[69992]: INFO nova.compute.manager [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Took 1.83 seconds to destroy the instance on the hypervisor. [ 1212.151951] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1212.155466] env[69992]: DEBUG nova.compute.manager [-] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1212.155598] env[69992]: DEBUG nova.network.neutron [-] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1212.194461] env[69992]: DEBUG nova.scheduler.client.report [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 121 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1212.194978] env[69992]: DEBUG nova.compute.provider_tree [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 121 to 122 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1212.195280] env[69992]: DEBUG nova.compute.provider_tree [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1212.331179] env[69992]: DEBUG oslo_vmware.api [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897574, 'name': PowerOnVM_Task, 'duration_secs': 0.660231} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.331179] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1212.331179] env[69992]: INFO nova.compute.manager [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Took 5.46 seconds to spawn the instance on the hypervisor. [ 1212.331179] env[69992]: DEBUG nova.compute.manager [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1212.334460] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c76cfdca-40d4-41ca-b93a-9844c093570d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.416921] env[69992]: INFO nova.compute.manager [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Rebuilding instance [ 1212.462568] env[69992]: DEBUG nova.compute.manager [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1212.463423] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8718b0e1-f8d8-40c9-b49e-5ae5b67c04dd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.701758] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.217s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1212.704326] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.355s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1212.705898] env[69992]: INFO nova.compute.claims [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1212.729407] env[69992]: INFO nova.scheduler.client.report [None req-a971e623-d252-4ad9-a7c0-fad9926571fb 
tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Deleted allocations for instance a49b4721-e338-4e60-b91e-137caa3c9c03 [ 1212.805953] env[69992]: DEBUG nova.compute.manager [req-43682e0a-132f-4565-8491-bfd042d66aef req-b5391c1f-a495-4a44-8e8a-fe30ba078950 service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Received event network-changed-789f6123-167b-48dd-ae68-cfdbc1d5c78a {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1212.806186] env[69992]: DEBUG nova.compute.manager [req-43682e0a-132f-4565-8491-bfd042d66aef req-b5391c1f-a495-4a44-8e8a-fe30ba078950 service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Refreshing instance network info cache due to event network-changed-789f6123-167b-48dd-ae68-cfdbc1d5c78a. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1212.806402] env[69992]: DEBUG oslo_concurrency.lockutils [req-43682e0a-132f-4565-8491-bfd042d66aef req-b5391c1f-a495-4a44-8e8a-fe30ba078950 service nova] Acquiring lock "refresh_cache-1b4da2ab-d026-45d8-8234-79ddd84d5cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.806545] env[69992]: DEBUG oslo_concurrency.lockutils [req-43682e0a-132f-4565-8491-bfd042d66aef req-b5391c1f-a495-4a44-8e8a-fe30ba078950 service nova] Acquired lock "refresh_cache-1b4da2ab-d026-45d8-8234-79ddd84d5cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1212.806705] env[69992]: DEBUG nova.network.neutron [req-43682e0a-132f-4565-8491-bfd042d66aef req-b5391c1f-a495-4a44-8e8a-fe30ba078950 service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Refreshing network info cache for port 789f6123-167b-48dd-ae68-cfdbc1d5c78a {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1212.855838] env[69992]: INFO nova.compute.manager [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Took 34.26 seconds to build instance. 
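
The inventory-update failure at [ 1211.075146 ] and its resolution at [ 1212.194461 ] (provider generation 121 -> 122) show Placement's optimistic-concurrency protocol: every write against a resource provider carries the generation the writer last read, a stale generation is rejected with 409 "placement.concurrent_update", and the caller refreshes its view of the provider and retries. Below is a minimal sketch of that refresh-and-retry loop, not Nova's report-client code; it assumes a reachable Placement endpoint and a valid token, and PLACEMENT_URL, OS_TOKEN and the use of the requests library are illustrative placeholders, not part of this log.

# Illustrative sketch only: generation-based retry against the Placement API,
# mirroring the 409 -> refresh -> retry sequence recorded above.
import os
import requests

PLACEMENT = os.environ.get("PLACEMENT_URL", "http://placement.example:8778")  # placeholder
HEADERS = {
    "X-Auth-Token": os.environ.get("OS_TOKEN", ""),           # placeholder token
    "OpenStack-API-Version": "placement 1.26",
}
PROVIDER = "9dc5dd7f-a3af-48a9-a04e-f6c1d333da28"  # provider UUID from the records above


def set_inventory(inventories: dict, max_attempts: int = 3) -> dict:
    """PUT the desired inventory, retrying when the provider generation is stale."""
    url = f"{PLACEMENT}/resource_providers/{PROVIDER}/inventories"
    for _ in range(max_attempts):
        # Read the current inventory to learn the provider generation
        # (the optimistic-concurrency token).
        current = requests.get(url, headers=HEADERS)
        current.raise_for_status()
        generation = current.json()["resource_provider_generation"]

        resp = requests.put(
            url,
            headers=HEADERS,
            json={
                "resource_provider_generation": generation,
                "inventories": inventories,
            },
        )
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()  # success: body carries the new generation
        # 409 placement.concurrent_update: another writer bumped the generation
        # (in the log, 121 -> 122); loop to refresh and retry with the new value.
    raise RuntimeError("inventory update kept conflicting; giving up")

The records above show Nova's scheduler report client doing the same thing internally: the ERROR with the 409 body is immediately followed by "Refreshing inventories ... from _refresh_and_get_inventory", then by a successful set_inventory_for_provider that records the generation moving from 121 to 122.
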
[ 1212.990792] env[69992]: DEBUG nova.compute.manager [req-224dc6b7-686c-4962-8963-c091b7a9e3a8 req-67db44e2-9091-4361-9d94-5ec66f77f2a4 service nova] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Received event network-vif-deleted-9a92d7a7-73b6-4bd0-b812-3af4be317ae5 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1212.991030] env[69992]: INFO nova.compute.manager [req-224dc6b7-686c-4962-8963-c091b7a9e3a8 req-67db44e2-9091-4361-9d94-5ec66f77f2a4 service nova] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Neutron deleted interface 9a92d7a7-73b6-4bd0-b812-3af4be317ae5; detaching it from the instance and deleting it from the info cache [ 1212.991215] env[69992]: DEBUG nova.network.neutron [req-224dc6b7-686c-4962-8963-c091b7a9e3a8 req-67db44e2-9091-4361-9d94-5ec66f77f2a4 service nova] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.175069] env[69992]: DEBUG nova.network.neutron [-] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.224827] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "9df7b187-e579-41b0-9d24-be2a1ae93079" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.224931] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "9df7b187-e579-41b0-9d24-be2a1ae93079" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.225142] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "9df7b187-e579-41b0-9d24-be2a1ae93079-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.225329] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "9df7b187-e579-41b0-9d24-be2a1ae93079-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.225498] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "9df7b187-e579-41b0-9d24-be2a1ae93079-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.227344] env[69992]: INFO nova.compute.manager [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Terminating instance [ 1213.236138] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a971e623-d252-4ad9-a7c0-fad9926571fb tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "a49b4721-e338-4e60-b91e-137caa3c9c03" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.796s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.358191] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3d2f149b-4ec8-4b8f-a1dd-a7bb40e82fac tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Lock "a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.770s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.476662] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1213.476993] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a5cc8d94-d061-4b9a-952c-b4835a14d2f2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.485393] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Waiting for the task: (returnval){ [ 1213.485393] env[69992]: value = "task-2897576" [ 1213.485393] env[69992]: _type = "Task" [ 1213.485393] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.496747] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897576, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.496747] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-84cee759-411f-430c-a500-3c5db99f347d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.512713] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f5a61b-e9fd-4360-8efa-286dd9ed47ee {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.544742] env[69992]: DEBUG nova.compute.manager [req-224dc6b7-686c-4962-8963-c091b7a9e3a8 req-67db44e2-9091-4361-9d94-5ec66f77f2a4 service nova] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Detach interface failed, port_id=9a92d7a7-73b6-4bd0-b812-3af4be317ae5, reason: Instance 7fc7c481-75e8-40f2-a971-752ce6dde59b could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1213.678666] env[69992]: INFO nova.compute.manager [-] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Took 1.52 seconds to deallocate network for instance. [ 1213.730358] env[69992]: DEBUG nova.compute.manager [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1213.730583] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1213.731494] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17570411-efef-445f-b6d2-a1b83daa5dc9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.742108] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1213.742108] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3ab7c810-df28-41e0-9697-ca1b07318b6f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.755301] env[69992]: DEBUG oslo_vmware.api [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1213.755301] env[69992]: value = "task-2897577" [ 1213.755301] env[69992]: _type = "Task" [ 1213.755301] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.760660] env[69992]: DEBUG oslo_vmware.api [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897577, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.866319] env[69992]: DEBUG nova.network.neutron [req-43682e0a-132f-4565-8491-bfd042d66aef req-b5391c1f-a495-4a44-8e8a-fe30ba078950 service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Updated VIF entry in instance network info cache for port 789f6123-167b-48dd-ae68-cfdbc1d5c78a. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1213.866319] env[69992]: DEBUG nova.network.neutron [req-43682e0a-132f-4565-8491-bfd042d66aef req-b5391c1f-a495-4a44-8e8a-fe30ba078950 service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Updating instance_info_cache with network_info: [{"id": "789f6123-167b-48dd-ae68-cfdbc1d5c78a", "address": "fa:16:3e:ed:f2:3c", "network": {"id": "e710c9b1-06e9-45f1-88fe-251001c4f54f", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1116812669-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8217315011854468b0cc17c4dfe342f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap789f6123-16", "ovs_interfaceid": "789f6123-167b-48dd-ae68-cfdbc1d5c78a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.948501] env[69992]: DEBUG nova.network.neutron [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Successfully updated port: 4ead8f7e-9ac9-474b-9302-a618d1bf1988 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1213.998036] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897576, 'name': PowerOffVM_Task, 'duration_secs': 0.124807} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.998334] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1213.998983] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1213.999785] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73e642b-7cbf-4575-a713-d320334448ff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.009387] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1214.009632] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8275428a-65e4-43b4-bac9-4d5dfc222b38 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.033387] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "31109fbd-ebc0-422d-a705-7d0e59d4bbb4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1214.033620] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "31109fbd-ebc0-422d-a705-7d0e59d4bbb4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1214.036033] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1214.036238] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1214.036410] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 
tempest-ServersListShow296Test-370241449-project-member] Deleting the datastore file [datastore2] 451a8af1-a4a2-4c2d-932c-58955491433b {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1214.036831] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-22fb5080-c9fe-4b51-b35e-3a06ab0edce6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.047624] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Waiting for the task: (returnval){ [ 1214.047624] env[69992]: value = "task-2897579" [ 1214.047624] env[69992]: _type = "Task" [ 1214.047624] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.057894] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897579, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.117982] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2487d7f8-e192-4785-b81c-3864702aabc6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.125590] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a51d11e-2de2-4d9c-9360-73407933e83a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.156668] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33272400-7707-46b0-a2ff-1daabc1be9e1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.164177] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d01d28d-a46b-4ade-bce9-17bd2f46042f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.177439] env[69992]: DEBUG nova.compute.provider_tree [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1214.185416] env[69992]: DEBUG oslo_concurrency.lockutils [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1214.262964] env[69992]: DEBUG oslo_vmware.api [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897577, 'name': PowerOffVM_Task, 'duration_secs': 0.254917} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.263347] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1214.263631] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1214.263866] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c287393-49ef-4c3f-9805-0f8298d3fbb6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.333899] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1214.334166] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1214.334363] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Deleting the datastore file [datastore1] 9df7b187-e579-41b0-9d24-be2a1ae93079 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1214.334613] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eec4146a-1b98-485f-8f11-1e60218d009a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.342071] env[69992]: DEBUG oslo_vmware.api [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1214.342071] env[69992]: value = "task-2897581" [ 1214.342071] env[69992]: _type = "Task" [ 1214.342071] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.349486] env[69992]: DEBUG oslo_vmware.api [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897581, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.369184] env[69992]: DEBUG oslo_concurrency.lockutils [req-43682e0a-132f-4565-8491-bfd042d66aef req-b5391c1f-a495-4a44-8e8a-fe30ba078950 service nova] Releasing lock "refresh_cache-1b4da2ab-d026-45d8-8234-79ddd84d5cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1214.451656] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1214.451853] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1214.452080] env[69992]: DEBUG nova.network.neutron [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1214.537859] env[69992]: DEBUG nova.compute.manager [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1214.556870] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897579, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103645} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.557162] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1214.557361] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1214.557549] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1214.699699] env[69992]: ERROR nova.scheduler.client.report [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [req-a1fdb3d1-401d-419b-8553-a43e80039f56] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a1fdb3d1-401d-419b-8553-a43e80039f56"}]} [ 1214.715957] env[69992]: DEBUG nova.scheduler.client.report [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Refreshing inventories for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1214.729604] env[69992]: DEBUG nova.scheduler.client.report [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Updating ProviderTree inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1214.729909] env[69992]: DEBUG nova.compute.provider_tree [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1214.743160] env[69992]: DEBUG nova.scheduler.client.report [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Refreshing aggregate associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, aggregates: None {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1214.751762] env[69992]: DEBUG oslo_concurrency.lockutils [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "27492ef7-8258-4001-b3b3-5bcb94e12c1f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1214.752016] env[69992]: DEBUG oslo_concurrency.lockutils [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "27492ef7-8258-4001-b3b3-5bcb94e12c1f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1214.752291] env[69992]: DEBUG oslo_concurrency.lockutils [None 
req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "27492ef7-8258-4001-b3b3-5bcb94e12c1f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1214.752494] env[69992]: DEBUG oslo_concurrency.lockutils [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "27492ef7-8258-4001-b3b3-5bcb94e12c1f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1214.752663] env[69992]: DEBUG oslo_concurrency.lockutils [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "27492ef7-8258-4001-b3b3-5bcb94e12c1f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.754766] env[69992]: INFO nova.compute.manager [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Terminating instance [ 1214.761626] env[69992]: DEBUG nova.scheduler.client.report [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Refreshing trait associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1214.854058] env[69992]: DEBUG oslo_vmware.api [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897581, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179453} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.854324] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1214.854507] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1214.854686] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1214.854903] env[69992]: INFO nova.compute.manager [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1214.855098] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1214.855292] env[69992]: DEBUG nova.compute.manager [-] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1214.855389] env[69992]: DEBUG nova.network.neutron [-] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1214.998463] env[69992]: WARNING nova.network.neutron [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] 7c8e9b14-bcc2-45f2-8b37-5f478b75057e already exists in list: networks containing: ['7c8e9b14-bcc2-45f2-8b37-5f478b75057e']. 
ignoring it [ 1215.062691] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1215.129055] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32816ec4-9183-4b94-bb98-7e45672d24dd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.137178] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa286d9b-1081-4e95-a9fd-8ed80d9356dd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.170194] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff092419-fb47-48af-bf06-06cab78c9063 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.179052] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98249830-692f-4bb0-af49-2d378abced70 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.194018] env[69992]: DEBUG nova.compute.provider_tree [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1215.260714] env[69992]: DEBUG nova.compute.manager [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1215.260714] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1215.260714] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dbc77b9-b246-4941-a1d3-0b8c10d4f767 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.272738] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1215.273035] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d706bcf-f485-4511-a98f-ecf8b32b90ca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.280761] env[69992]: DEBUG oslo_vmware.api [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1215.280761] env[69992]: value = "task-2897582" [ 1215.280761] env[69992]: _type = "Task" [ 1215.280761] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.290976] env[69992]: DEBUG oslo_vmware.api [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897582, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.311457] env[69992]: DEBUG oslo_concurrency.lockutils [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "dedba037-48a7-4083-925d-5f34e2a27362" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1215.311708] env[69992]: DEBUG oslo_concurrency.lockutils [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "dedba037-48a7-4083-925d-5f34e2a27362" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1215.387773] env[69992]: DEBUG nova.network.neutron [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Updating instance_info_cache with network_info: [{"id": "48ef557e-b0bc-4415-84c9-60b9146b4ff7", "address": "fa:16:3e:6e:ee:46", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48ef557e-b0", "ovs_interfaceid": "48ef557e-b0bc-4415-84c9-60b9146b4ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4ead8f7e-9ac9-474b-9302-a618d1bf1988", "address": "fa:16:3e:43:bd:3a", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap4ead8f7e-9a", "ovs_interfaceid": "4ead8f7e-9ac9-474b-9302-a618d1bf1988", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1215.591201] env[69992]: DEBUG nova.virt.hardware [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1215.591201] env[69992]: DEBUG nova.virt.hardware [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1215.591201] env[69992]: DEBUG nova.virt.hardware [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1215.591409] env[69992]: DEBUG nova.virt.hardware [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1215.591573] env[69992]: DEBUG nova.virt.hardware [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1215.591725] env[69992]: DEBUG nova.virt.hardware [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1215.591937] env[69992]: DEBUG nova.virt.hardware [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1215.592118] env[69992]: DEBUG nova.virt.hardware [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 
tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1215.592471] env[69992]: DEBUG nova.virt.hardware [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1215.592471] env[69992]: DEBUG nova.virt.hardware [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1215.592632] env[69992]: DEBUG nova.virt.hardware [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1215.593508] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c1a8ef4-6eb5-49c0-ac45-bedc09ed3d70 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.602100] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db940d77-2836-4b86-aff0-b7c0af54b377 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.616408] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Instance VIF info [] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1215.622107] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1215.623296] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1215.624494] env[69992]: DEBUG nova.compute.manager [req-fab97187-882c-4f92-b7a4-14c845a231b7 req-5fc322e1-cfe3-4b54-8d46-6d2dea8ba1c5 service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Received event network-vif-plugged-4ead8f7e-9ac9-474b-9302-a618d1bf1988 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1215.624692] env[69992]: DEBUG oslo_concurrency.lockutils [req-fab97187-882c-4f92-b7a4-14c845a231b7 req-5fc322e1-cfe3-4b54-8d46-6d2dea8ba1c5 service nova] Acquiring lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1215.624894] env[69992]: DEBUG oslo_concurrency.lockutils [req-fab97187-882c-4f92-b7a4-14c845a231b7 req-5fc322e1-cfe3-4b54-8d46-6d2dea8ba1c5 service nova] Lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1215.625073] env[69992]: DEBUG oslo_concurrency.lockutils [req-fab97187-882c-4f92-b7a4-14c845a231b7 req-5fc322e1-cfe3-4b54-8d46-6d2dea8ba1c5 service nova] Lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1215.625241] env[69992]: DEBUG nova.compute.manager [req-fab97187-882c-4f92-b7a4-14c845a231b7 req-5fc322e1-cfe3-4b54-8d46-6d2dea8ba1c5 service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] No waiting events found dispatching network-vif-plugged-4ead8f7e-9ac9-474b-9302-a618d1bf1988 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1215.625401] env[69992]: WARNING nova.compute.manager [req-fab97187-882c-4f92-b7a4-14c845a231b7 req-5fc322e1-cfe3-4b54-8d46-6d2dea8ba1c5 service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Received unexpected event network-vif-plugged-4ead8f7e-9ac9-474b-9302-a618d1bf1988 for instance with vm_state active and task_state None. [ 1215.625559] env[69992]: DEBUG nova.compute.manager [req-fab97187-882c-4f92-b7a4-14c845a231b7 req-5fc322e1-cfe3-4b54-8d46-6d2dea8ba1c5 service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Received event network-changed-4ead8f7e-9ac9-474b-9302-a618d1bf1988 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1215.625711] env[69992]: DEBUG nova.compute.manager [req-fab97187-882c-4f92-b7a4-14c845a231b7 req-5fc322e1-cfe3-4b54-8d46-6d2dea8ba1c5 service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Refreshing instance network info cache due to event network-changed-4ead8f7e-9ac9-474b-9302-a618d1bf1988. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1215.625874] env[69992]: DEBUG oslo_concurrency.lockutils [req-fab97187-882c-4f92-b7a4-14c845a231b7 req-5fc322e1-cfe3-4b54-8d46-6d2dea8ba1c5 service nova] Acquiring lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1215.626064] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1899664f-3cb1-48a5-9c54-43fb14ec398c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.646795] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1215.646795] env[69992]: value = "task-2897583" [ 1215.646795] env[69992]: _type = "Task" [ 1215.646795] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.654255] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897583, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.724411] env[69992]: ERROR nova.scheduler.client.report [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [req-422598a8-be86-40ed-9d9f-c5e2e9b5e5a0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-422598a8-be86-40ed-9d9f-c5e2e9b5e5a0"}]} [ 1215.729082] env[69992]: DEBUG nova.network.neutron [-] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1215.741337] env[69992]: DEBUG nova.scheduler.client.report [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Refreshing inventories for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1215.757405] env[69992]: DEBUG nova.scheduler.client.report [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Updating ProviderTree inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1215.757639] env[69992]: DEBUG nova.compute.provider_tree [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1215.769155] env[69992]: DEBUG nova.scheduler.client.report [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Refreshing aggregate associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, aggregates: None {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1215.790820] env[69992]: DEBUG oslo_vmware.api [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897582, 'name': PowerOffVM_Task, 'duration_secs': 0.249806} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.791966] env[69992]: DEBUG nova.scheduler.client.report [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Refreshing trait associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1215.794484] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1215.794621] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1215.796259] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1112171a-787d-404c-8bd9-a86bf7a79eec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.815165] env[69992]: DEBUG nova.compute.manager [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1215.857427] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1215.857634] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1215.857816] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Deleting the datastore file [datastore1] 27492ef7-8258-4001-b3b3-5bcb94e12c1f {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1215.858101] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-83c7333e-4614-45f0-b848-9a0f05a0692d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.864820] env[69992]: DEBUG oslo_vmware.api [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for the task: (returnval){ [ 1215.864820] env[69992]: value = "task-2897585" [ 1215.864820] env[69992]: _type = "Task" [ 1215.864820] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.874962] env[69992]: DEBUG oslo_vmware.api [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897585, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.890625] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1215.891289] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1215.891498] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1215.891703] env[69992]: DEBUG oslo_concurrency.lockutils [req-fab97187-882c-4f92-b7a4-14c845a231b7 req-5fc322e1-cfe3-4b54-8d46-6d2dea8ba1c5 service nova] Acquired lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1215.891880] env[69992]: DEBUG nova.network.neutron [req-fab97187-882c-4f92-b7a4-14c845a231b7 req-5fc322e1-cfe3-4b54-8d46-6d2dea8ba1c5 service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Refreshing network info cache for port 4ead8f7e-9ac9-474b-9302-a618d1bf1988 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1215.894239] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5e91094-5ca4-4d42-868e-cbb47f28dfd0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.912946] env[69992]: DEBUG nova.virt.hardware [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1215.913227] env[69992]: DEBUG nova.virt.hardware [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1215.913395] env[69992]: DEBUG nova.virt.hardware [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 
tempest-AttachInterfacesTestJSON-1431277975-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1215.913588] env[69992]: DEBUG nova.virt.hardware [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1215.913726] env[69992]: DEBUG nova.virt.hardware [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1215.913874] env[69992]: DEBUG nova.virt.hardware [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1215.914094] env[69992]: DEBUG nova.virt.hardware [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1215.914254] env[69992]: DEBUG nova.virt.hardware [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1215.914444] env[69992]: DEBUG nova.virt.hardware [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1215.914622] env[69992]: DEBUG nova.virt.hardware [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1215.914805] env[69992]: DEBUG nova.virt.hardware [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1215.921235] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Reconfiguring VM to attach interface {{(pid=69992) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1215.924409] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab149eb2-485c-4ee8-a83c-865ad8349576 {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.942669] env[69992]: DEBUG oslo_vmware.api [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1215.942669] env[69992]: value = "task-2897586" [ 1215.942669] env[69992]: _type = "Task" [ 1215.942669] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.953877] env[69992]: DEBUG oslo_vmware.api [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897586, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.039145] env[69992]: INFO nova.compute.manager [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Rebuilding instance [ 1216.079129] env[69992]: DEBUG nova.compute.manager [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1216.079675] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d665e5c-fa47-4325-b09a-df748d25d04f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.161516] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897583, 'name': CreateVM_Task, 'duration_secs': 0.398027} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.161709] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1216.162111] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1216.162335] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1216.162604] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1216.162863] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5d0ef10-51d0-41ed-8550-798273ed2979 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.170374] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Waiting for the task: (returnval){ [ 1216.170374] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d22ad8-0abe-ad33-dbb7-61683d0dfaf6" [ 1216.170374] env[69992]: _type = "Task" [ 1216.170374] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.178843] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d22ad8-0abe-ad33-dbb7-61683d0dfaf6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.228840] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-280d02ab-73cd-4640-bedf-403be71a1340 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.231847] env[69992]: INFO nova.compute.manager [-] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Took 1.38 seconds to deallocate network for instance. 
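The repeated "Waiting for the task: (returnval){ ... } to complete", "progress is N%" and "completed successfully" entries above are produced by oslo.vmware's task polling (wait_for_task / _poll_task in oslo_vmware/api.py). The following is a minimal illustrative sketch of that invoke-then-wait calling pattern, not Nova's actual code path; it assumes a hypothetical, already-logged-in oslo_vmware VMwareAPISession named `session`, plus `vm_ref` and `config_spec` objects obtained elsewhere.

    # Sketch of the pattern behind the ReconfigVM_Task wait/poll lines above.
    # Assumptions (not from the log): `session` is an established
    # oslo_vmware.api.VMwareAPISession; `vm_ref` is a VirtualMachine
    # managed-object reference; `config_spec` is built elsewhere.
    from oslo_vmware import exceptions as vexc

    def reconfigure_vm(session, vm_ref, config_spec):
        # invoke_api() issues the SOAP call and returns a Task managed
        # object (e.g. the "task-2897586" value logged above).
        task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                  vm_ref, spec=config_spec)
        try:
            # wait_for_task() polls the task until it reaches a terminal
            # state; each poll corresponds to one of the
            # "_poll_task ... progress is N%" DEBUG lines in this log.
            return session.wait_for_task(task)
        except vexc.VimException:
            # A task that ends in an error state raises here; the log would
            # then show an ERROR entry instead of "completed successfully".
            raise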
[ 1216.240268] env[69992]: INFO nova.compute.manager [None req-64a8031c-eb06-457a-8e3d-af38dffabde6 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Get console output [ 1216.240610] env[69992]: WARNING nova.virt.vmwareapi.driver [None req-64a8031c-eb06-457a-8e3d-af38dffabde6 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] The console log is missing. Check your VSPC configuration [ 1216.242226] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd1acc2e-0028-4869-8ec7-060f5dfd34ee {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.275989] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f409d368-806a-4a6a-8fee-83fe8b2a0f5c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.283717] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-464c8730-0f92-4898-b6a0-29b82877d40d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.297548] env[69992]: DEBUG nova.compute.provider_tree [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1216.334119] env[69992]: DEBUG oslo_concurrency.lockutils [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1216.375050] env[69992]: DEBUG oslo_vmware.api [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Task: {'id': task-2897585, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211473} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.375234] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1216.375421] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1216.375598] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1216.375773] env[69992]: INFO nova.compute.manager [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1216.376025] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1216.376223] env[69992]: DEBUG nova.compute.manager [-] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1216.376317] env[69992]: DEBUG nova.network.neutron [-] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1216.453028] env[69992]: DEBUG oslo_vmware.api [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897586, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.681296] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d22ad8-0abe-ad33-dbb7-61683d0dfaf6, 'name': SearchDatastore_Task, 'duration_secs': 0.009827} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.681555] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1216.681795] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1216.682080] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1216.682247] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1216.682516] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1216.682874] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70ab44c5-a07a-4735-b405-f2c72b3e9eb9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.693976] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1216.694226] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1216.695011] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e292b2c-6f71-45fc-8764-1859d9f317a3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.700032] env[69992]: DEBUG nova.network.neutron [req-fab97187-882c-4f92-b7a4-14c845a231b7 req-5fc322e1-cfe3-4b54-8d46-6d2dea8ba1c5 service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Updated VIF entry in instance network info cache for port 4ead8f7e-9ac9-474b-9302-a618d1bf1988. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1216.700436] env[69992]: DEBUG nova.network.neutron [req-fab97187-882c-4f92-b7a4-14c845a231b7 req-5fc322e1-cfe3-4b54-8d46-6d2dea8ba1c5 service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Updating instance_info_cache with network_info: [{"id": "48ef557e-b0bc-4415-84c9-60b9146b4ff7", "address": "fa:16:3e:6e:ee:46", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48ef557e-b0", "ovs_interfaceid": "48ef557e-b0bc-4415-84c9-60b9146b4ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4ead8f7e-9ac9-474b-9302-a618d1bf1988", "address": "fa:16:3e:43:bd:3a", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ead8f7e-9a", "ovs_interfaceid": "4ead8f7e-9ac9-474b-9302-a618d1bf1988", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1216.705926] 
env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Waiting for the task: (returnval){ [ 1216.705926] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52b4c4b9-7981-1589-3bfc-c267586bc9fa" [ 1216.705926] env[69992]: _type = "Task" [ 1216.705926] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.717407] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b4c4b9-7981-1589-3bfc-c267586bc9fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.742967] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1216.831039] env[69992]: DEBUG nova.scheduler.client.report [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 124 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1216.831490] env[69992]: DEBUG nova.compute.provider_tree [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 124 to 125 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1216.831801] env[69992]: DEBUG nova.compute.provider_tree [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1216.952422] env[69992]: DEBUG oslo_vmware.api [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897586, 
'name': ReconfigVM_Task, 'duration_secs': 0.685037} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.952926] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1216.953166] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Reconfigured VM to attach interface {{(pid=69992) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1217.094706] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1217.095054] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3b646cc0-f4d5-4fbe-af16-237fa4ba2942 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.103053] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for the task: (returnval){ [ 1217.103053] env[69992]: value = "task-2897587" [ 1217.103053] env[69992]: _type = "Task" [ 1217.103053] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.112013] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897587, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.203039] env[69992]: DEBUG oslo_concurrency.lockutils [req-fab97187-882c-4f92-b7a4-14c845a231b7 req-5fc322e1-cfe3-4b54-8d46-6d2dea8ba1c5 service nova] Releasing lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1217.215812] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b4c4b9-7981-1589-3bfc-c267586bc9fa, 'name': SearchDatastore_Task, 'duration_secs': 0.032691} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.216638] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b52c66e-2df6-4c9d-8718-182f56198ef7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.222151] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Waiting for the task: (returnval){ [ 1217.222151] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]525ec13e-745d-1f1e-8290-5f6fa497704f" [ 1217.222151] env[69992]: _type = "Task" [ 1217.222151] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.230658] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525ec13e-745d-1f1e-8290-5f6fa497704f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.259183] env[69992]: DEBUG nova.network.neutron [-] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1217.338730] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.634s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1217.339282] env[69992]: DEBUG nova.compute.manager [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1217.342310] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.320s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1217.342523] env[69992]: DEBUG nova.objects.instance [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Lazy-loading 'resources' on Instance uuid 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1217.457649] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0ca25e06-4449-4f1b-b206-38afac83b752 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "interface-5f98a2aa-eb7b-41d2-9e9f-14cee9445942-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.930s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1217.613312] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897587, 'name': PowerOffVM_Task, 'duration_secs': 0.297227} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.613312] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1217.614027] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1217.614391] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d853b0-5ace-414b-b687-2e305c6d8e3f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.620860] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1217.621103] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e25fa068-65a5-49b5-810d-2a468fcec6e2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.643514] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 
tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1217.643744] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1217.643924] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Deleting the datastore file [datastore1] a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1217.644292] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c67b91b-f5ec-4284-8ac0-6cbb66b9e1ed {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.650640] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for the task: (returnval){ [ 1217.650640] env[69992]: value = "task-2897589" [ 1217.650640] env[69992]: _type = "Task" [ 1217.650640] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.658520] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897589, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.734157] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525ec13e-745d-1f1e-8290-5f6fa497704f, 'name': SearchDatastore_Task, 'duration_secs': 0.025048} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.734435] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1217.734727] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 451a8af1-a4a2-4c2d-932c-58955491433b/451a8af1-a4a2-4c2d-932c-58955491433b.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1217.734955] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc70e580-1a61-4686-b1d1-07146c56f9f3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.741475] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Waiting for the task: (returnval){ [ 1217.741475] env[69992]: value = "task-2897590" [ 1217.741475] env[69992]: _type = "Task" [ 1217.741475] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.749360] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897590, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.761900] env[69992]: INFO nova.compute.manager [-] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Took 1.39 seconds to deallocate network for instance. [ 1217.843944] env[69992]: DEBUG nova.compute.utils [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1217.845363] env[69992]: DEBUG nova.compute.manager [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1217.845486] env[69992]: DEBUG nova.network.neutron [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1217.886409] env[69992]: DEBUG nova.policy [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '57d2ee1abedf4874bcb44b4076199da6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b8716c4b7324052a3472734c655655a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1217.905151] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Acquiring lock "fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1217.905497] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Lock "fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1217.905636] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Acquiring lock "fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1217.905853] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Lock "fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1217.906046] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Lock "fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1217.908839] env[69992]: 
INFO nova.compute.manager [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Terminating instance [ 1217.916713] env[69992]: DEBUG nova.compute.manager [req-4d518a47-3b87-4477-a134-4c8056f62df7 req-6f1742af-bb0c-432b-85dd-095a24332b94 service nova] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Received event network-vif-deleted-fd0c5f07-29de-4e64-a60c-655c3da4bb9e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1217.916927] env[69992]: DEBUG nova.compute.manager [req-4d518a47-3b87-4477-a134-4c8056f62df7 req-6f1742af-bb0c-432b-85dd-095a24332b94 service nova] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Received event network-vif-deleted-617fc6d5-b33e-407b-8a59-8a6def94c1f4 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1218.163706] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897589, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086173} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.164165] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1218.164246] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1218.164362] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1218.186217] env[69992]: DEBUG nova.network.neutron [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Successfully created port: 445cdcf8-38ea-4465-a568-4f4e63c483dd {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1218.251661] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897590, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457719} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.254897] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 451a8af1-a4a2-4c2d-932c-58955491433b/451a8af1-a4a2-4c2d-932c-58955491433b.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1218.255182] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1218.256368] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7d68eb32-2e9a-4114-8fc7-812c6d30f653 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.263262] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Waiting for the task: (returnval){ [ 1218.263262] env[69992]: value = "task-2897591" [ 1218.263262] env[69992]: _type = "Task" [ 1218.263262] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.268459] env[69992]: DEBUG oslo_concurrency.lockutils [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1218.276068] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897591, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.296485] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a43c39b-1960-478d-9867-1ec6168f448e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.304666] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f8e3672-daaa-4f91-b990-e5b1a69433f4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.338829] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c14d972b-d706-4d1e-abef-8a573a00d644 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.345719] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd42312-beb7-48ae-a8f2-e6ac50294b87 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.351292] env[69992]: DEBUG nova.compute.manager [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1218.366268] env[69992]: DEBUG nova.compute.provider_tree [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1218.415704] env[69992]: DEBUG nova.compute.manager [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1218.415704] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1218.416063] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c9b527d0-0a1a-473d-85ed-0366eaf45602 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.423731] env[69992]: DEBUG oslo_vmware.api [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Waiting for the task: (returnval){ [ 1218.423731] env[69992]: value = "task-2897592" [ 1218.423731] env[69992]: _type = "Task" [ 1218.423731] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.432554] env[69992]: DEBUG oslo_vmware.api [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Task: {'id': task-2897592, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.776672] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897591, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066784} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.776937] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1218.777739] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2124b0f9-bf34-4da8-a38f-c5856e657d2e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.797443] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 451a8af1-a4a2-4c2d-932c-58955491433b/451a8af1-a4a2-4c2d-932c-58955491433b.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1218.798017] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a903e441-f0d8-443d-8956-68197d615704 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.818036] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Waiting for the task: (returnval){ [ 1218.818036] env[69992]: value = "task-2897593" [ 1218.818036] env[69992]: _type = "Task" [ 1218.818036] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.827575] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897593, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.873340] env[69992]: DEBUG nova.scheduler.client.report [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1218.933707] env[69992]: DEBUG oslo_vmware.api [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Task: {'id': task-2897592, 'name': PowerOffVM_Task, 'duration_secs': 0.253586} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.933962] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1218.934208] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Volume detach. 
Driver type: vmdk {{(pid=69992) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1218.934407] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581892', 'volume_id': 'a203e79e-9126-47e8-96d7-9c0a57c68179', 'name': 'volume-a203e79e-9126-47e8-96d7-9c0a57c68179', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2', 'attached_at': '', 'detached_at': '', 'volume_id': 'a203e79e-9126-47e8-96d7-9c0a57c68179', 'serial': 'a203e79e-9126-47e8-96d7-9c0a57c68179'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1218.935303] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f2e81a3-7997-4f8f-8812-e406afa40336 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.953491] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d939230-4859-48ac-90fa-205148e38475 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.962102] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674309d9-ffa5-4374-8437-e1c258087714 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.981631] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf9c6c6-8125-4355-b2c1-1a1b7725fbae {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.995394] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] The volume has not been displaced from its original location: [datastore2] volume-a203e79e-9126-47e8-96d7-9c0a57c68179/volume-a203e79e-9126-47e8-96d7-9c0a57c68179.vmdk. No consolidation needed. 
{{(pid=69992) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1219.001199] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Reconfiguring VM instance instance-00000028 to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1219.002595] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-704b1f18-32ea-4c30-b74b-c18ab5bfb015 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.023647] env[69992]: DEBUG oslo_vmware.api [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Waiting for the task: (returnval){ [ 1219.023647] env[69992]: value = "task-2897594" [ 1219.023647] env[69992]: _type = "Task" [ 1219.023647] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.032376] env[69992]: DEBUG oslo_vmware.api [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Task: {'id': task-2897594, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.199483] env[69992]: DEBUG nova.virt.hardware [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1219.199736] env[69992]: DEBUG nova.virt.hardware [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1219.199896] env[69992]: DEBUG nova.virt.hardware [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1219.200175] env[69992]: DEBUG nova.virt.hardware [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Flavor pref 0:0:0 {{(pid=69992) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1219.200331] env[69992]: DEBUG nova.virt.hardware [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1219.200479] env[69992]: DEBUG nova.virt.hardware [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1219.200690] env[69992]: DEBUG nova.virt.hardware [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1219.200849] env[69992]: DEBUG nova.virt.hardware [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1219.201052] env[69992]: DEBUG nova.virt.hardware [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1219.201189] env[69992]: DEBUG nova.virt.hardware [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1219.201361] env[69992]: DEBUG nova.virt.hardware [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1219.202267] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc457f1a-c38b-4fd5-a22e-fabf5a9b0e8a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.209740] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f9b2f7-aa52-4395-b305-87ac5c9ac776 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.223021] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Instance VIF info [] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1219.228660] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 
tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1219.229036] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1219.229243] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1eab520c-ef99-4a19-8e70-2d30c59c5f6e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.245797] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1219.245797] env[69992]: value = "task-2897595" [ 1219.245797] env[69992]: _type = "Task" [ 1219.245797] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.253685] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897595, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.327323] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897593, 'name': ReconfigVM_Task, 'duration_secs': 0.289374} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.327617] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 451a8af1-a4a2-4c2d-932c-58955491433b/451a8af1-a4a2-4c2d-932c-58955491433b.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1219.328401] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3e3f54db-2c68-4a3d-a7b1-41b9ca7e7370 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.335181] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Waiting for the task: (returnval){ [ 1219.335181] env[69992]: value = "task-2897596" [ 1219.335181] env[69992]: _type = "Task" [ 1219.335181] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.344853] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897596, 'name': Rename_Task} progress is 5%. 
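The "Getting desirable topologies" entries above walk through the CPU-topology selection for the m1.nano flavor: with no flavor or image limits set (0:0:0), the limits default to 65536 per dimension, and a single vCPU admits exactly one topology, 1 socket x 1 core x 1 thread. A minimal illustrative sketch of that enumeration follows; the helper below is an assumption for illustration, not nova.virt.hardware itself.

```python
from collections import namedtuple

Topology = namedtuple("Topology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate every (sockets, cores, threads) split whose product equals vcpus
    and that stays within the per-dimension limits, as in the entries above."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                found.append(Topology(sockets, cores, threads))
    return found

# m1.nano above has vcpus=1, so exactly one topology is possible: 1:1:1.
assert possible_topologies(1) == [Topology(1, 1, 1)]
```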
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.378619] env[69992]: DEBUG nova.compute.manager [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1219.381303] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.039s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1219.383417] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.331s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1219.383608] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1219.385383] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.845s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1219.387041] env[69992]: INFO nova.compute.claims [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1219.405439] env[69992]: DEBUG nova.virt.hardware [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:567}} [ 1219.405627] env[69992]: DEBUG nova.virt.hardware [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1219.406567] env[69992]: DEBUG nova.virt.hardware [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1219.406567] env[69992]: DEBUG nova.virt.hardware [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1219.406567] env[69992]: DEBUG nova.virt.hardware [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1219.406567] env[69992]: DEBUG nova.virt.hardware [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1219.406923] env[69992]: DEBUG nova.virt.hardware [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1219.406923] env[69992]: DEBUG nova.virt.hardware [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1219.407084] env[69992]: DEBUG nova.virt.hardware [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1219.407282] env[69992]: DEBUG nova.virt.hardware [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1219.407485] env[69992]: DEBUG nova.virt.hardware [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1219.408463] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef2cb7d-330d-44f6-b977-84fc4d4cc4c7 
{{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.412479] env[69992]: INFO nova.scheduler.client.report [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Deleted allocations for instance a7f01cd7-f148-48fc-a71a-5461672d6039 [ 1219.414289] env[69992]: INFO nova.scheduler.client.report [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Deleted allocations for instance 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6 [ 1219.423619] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ee81c6-40ef-48a8-b68b-adf4679a5c67 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.519580] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "interface-5f98a2aa-eb7b-41d2-9e9f-14cee9445942-82ef9ca1-4fe1-48ff-bf80-ca693a1f7662" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1219.520094] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "interface-5f98a2aa-eb7b-41d2-9e9f-14cee9445942-82ef9ca1-4fe1-48ff-bf80-ca693a1f7662" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1219.520644] env[69992]: DEBUG nova.objects.instance [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lazy-loading 'flavor' on Instance uuid 5f98a2aa-eb7b-41d2-9e9f-14cee9445942 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1219.535841] env[69992]: DEBUG oslo_vmware.api [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Task: {'id': task-2897594, 'name': ReconfigVM_Task, 'duration_secs': 0.166739} completed successfully. 
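Every vSphere call in these entries follows the same asynchronous shape: invoke a *_Task method (ReconfigVM_Task, CreateVM_Task, CopyVirtualDisk_Task, ...), then wait on the returned task object while oslo.vmware polls its progress ("progress is 5%." through "completed successfully."). A minimal sketch of that invoke-then-wait pattern, assuming an already-configured session; the host, credentials, vm_ref and config_spec below are placeholders, not values from this deployment.

```python
from oslo_vmware import api

# Placeholder endpoint and credentials, for illustration only.
session = api.VMwareAPISession('vc.example.test',
                               'administrator@vsphere.local',
                               'secret',
                               api_retry_count=10,
                               task_poll_interval=0.5)

def reconfigure_vm(vm_ref, config_spec):
    # Start the asynchronous vSphere task ...
    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=config_spec)
    # ... then block while the session polls the task until it succeeds or raises.
    return session.wait_for_task(task)
```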
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.536761] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Reconfigured VM instance instance-00000028 to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1219.543544] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f81b31c-8e78-4541-8099-a0fcdc7e2878 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.561822] env[69992]: DEBUG oslo_vmware.api [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Waiting for the task: (returnval){ [ 1219.561822] env[69992]: value = "task-2897597" [ 1219.561822] env[69992]: _type = "Task" [ 1219.561822] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.570476] env[69992]: DEBUG oslo_vmware.api [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Task: {'id': task-2897597, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.756060] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897595, 'name': CreateVM_Task, 'duration_secs': 0.319742} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.756230] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1219.756682] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1219.756812] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1219.757146] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1219.757698] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2e541d5-00c7-40d3-a399-4edd54bf1dcd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.762350] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for the task: (returnval){ [ 1219.762350] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52169903-ff2a-7b16-b1c0-424c3e9041d7" [ 1219.762350] env[69992]: _type = "Task" [ 1219.762350] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.765983] env[69992]: DEBUG nova.network.neutron [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Successfully updated port: 445cdcf8-38ea-4465-a568-4f4e63c483dd {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1219.771876] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52169903-ff2a-7b16-b1c0-424c3e9041d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.845119] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897596, 'name': Rename_Task, 'duration_secs': 0.174901} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.845501] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1219.845742] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57f4d85a-da9b-4c88-89f8-2638a543d8fe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.851978] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Waiting for the task: (returnval){ [ 1219.851978] env[69992]: value = "task-2897598" [ 1219.851978] env[69992]: _type = "Task" [ 1219.851978] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.859067] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897598, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.922897] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bad74599-a5e2-40a6-89d2-3533e26e8aed tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "a7f01cd7-f148-48fc-a71a-5461672d6039" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.239s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1219.926073] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1525be1d-a61f-49c3-8655-1f1eedbab33d tempest-AttachInterfacesUnderV243Test-777888026 tempest-AttachInterfacesUnderV243Test-777888026-project-member] Lock "30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.596s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.072142] env[69992]: DEBUG oslo_vmware.api [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Task: {'id': task-2897597, 'name': ReconfigVM_Task, 'duration_secs': 0.280854} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.072440] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-581892', 'volume_id': 'a203e79e-9126-47e8-96d7-9c0a57c68179', 'name': 'volume-a203e79e-9126-47e8-96d7-9c0a57c68179', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2', 'attached_at': '', 'detached_at': '', 'volume_id': 'a203e79e-9126-47e8-96d7-9c0a57c68179', 'serial': 'a203e79e-9126-47e8-96d7-9c0a57c68179'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1220.072699] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1220.073572] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d311d516-220f-4c77-8f2f-123079db331c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.080033] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1220.082299] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a6f0b242-1f89-4fee-a278-17e4c198362c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.112200] env[69992]: DEBUG nova.objects.instance [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lazy-loading 'pci_requests' on Instance uuid 5f98a2aa-eb7b-41d2-9e9f-14cee9445942 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1220.148422] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1220.148586] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1220.149204] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 
tempest-ServerActionsV293TestJSON-1685366128-project-member] Deleting the datastore file [datastore2] fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1220.149204] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be7446b5-954d-4c39-8059-62693d402eea {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.156037] env[69992]: DEBUG oslo_vmware.api [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Waiting for the task: (returnval){ [ 1220.156037] env[69992]: value = "task-2897600" [ 1220.156037] env[69992]: _type = "Task" [ 1220.156037] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.165679] env[69992]: DEBUG oslo_vmware.api [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Task: {'id': task-2897600, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.210028] env[69992]: DEBUG nova.compute.manager [req-0d4da9fe-c93c-496b-898e-66f16c880210 req-9460f6b9-d445-43db-b755-8c16d8654bda service nova] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Received event network-vif-plugged-445cdcf8-38ea-4465-a568-4f4e63c483dd {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1220.210028] env[69992]: DEBUG oslo_concurrency.lockutils [req-0d4da9fe-c93c-496b-898e-66f16c880210 req-9460f6b9-d445-43db-b755-8c16d8654bda service nova] Acquiring lock "c4bd5585-d917-4d92-9ce8-fa1950944f25-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1220.210028] env[69992]: DEBUG oslo_concurrency.lockutils [req-0d4da9fe-c93c-496b-898e-66f16c880210 req-9460f6b9-d445-43db-b755-8c16d8654bda service nova] Lock "c4bd5585-d917-4d92-9ce8-fa1950944f25-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1220.210028] env[69992]: DEBUG oslo_concurrency.lockutils [req-0d4da9fe-c93c-496b-898e-66f16c880210 req-9460f6b9-d445-43db-b755-8c16d8654bda service nova] Lock "c4bd5585-d917-4d92-9ce8-fa1950944f25-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.210028] env[69992]: DEBUG nova.compute.manager [req-0d4da9fe-c93c-496b-898e-66f16c880210 req-9460f6b9-d445-43db-b755-8c16d8654bda service nova] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] No waiting events found dispatching network-vif-plugged-445cdcf8-38ea-4465-a568-4f4e63c483dd {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1220.210028] env[69992]: WARNING nova.compute.manager [req-0d4da9fe-c93c-496b-898e-66f16c880210 req-9460f6b9-d445-43db-b755-8c16d8654bda service nova] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Received unexpected event 
network-vif-plugged-445cdcf8-38ea-4465-a568-4f4e63c483dd for instance with vm_state building and task_state spawning. [ 1220.210028] env[69992]: DEBUG nova.compute.manager [req-0d4da9fe-c93c-496b-898e-66f16c880210 req-9460f6b9-d445-43db-b755-8c16d8654bda service nova] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Received event network-changed-445cdcf8-38ea-4465-a568-4f4e63c483dd {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1220.210028] env[69992]: DEBUG nova.compute.manager [req-0d4da9fe-c93c-496b-898e-66f16c880210 req-9460f6b9-d445-43db-b755-8c16d8654bda service nova] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Refreshing instance network info cache due to event network-changed-445cdcf8-38ea-4465-a568-4f4e63c483dd. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1220.210028] env[69992]: DEBUG oslo_concurrency.lockutils [req-0d4da9fe-c93c-496b-898e-66f16c880210 req-9460f6b9-d445-43db-b755-8c16d8654bda service nova] Acquiring lock "refresh_cache-c4bd5585-d917-4d92-9ce8-fa1950944f25" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.210028] env[69992]: DEBUG oslo_concurrency.lockutils [req-0d4da9fe-c93c-496b-898e-66f16c880210 req-9460f6b9-d445-43db-b755-8c16d8654bda service nova] Acquired lock "refresh_cache-c4bd5585-d917-4d92-9ce8-fa1950944f25" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1220.210028] env[69992]: DEBUG nova.network.neutron [req-0d4da9fe-c93c-496b-898e-66f16c880210 req-9460f6b9-d445-43db-b755-8c16d8654bda service nova] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Refreshing network info cache for port 445cdcf8-38ea-4465-a568-4f4e63c483dd {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1220.273283] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "refresh_cache-c4bd5585-d917-4d92-9ce8-fa1950944f25" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.273732] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52169903-ff2a-7b16-b1c0-424c3e9041d7, 'name': SearchDatastore_Task, 'duration_secs': 0.009321} completed successfully. 
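The WARNING above ("Received unexpected event network-vif-plugged-... for instance with vm_state building and task_state spawning") appears because the per-instance event table has no registered waiter for that (event, port) pair, so the notification is dropped and only the follow-up network-changed event triggers a network info cache refresh. An illustrative sketch of that dispatch logic, not Nova's code:

```python
import threading
from collections import defaultdict

class InstanceEvents:
    """Toy model of the per-instance event table; not Nova's implementation."""

    def __init__(self):
        self._waiters = defaultdict(dict)   # instance_uuid -> {(name, tag): Event}
        self._lock = threading.Lock()       # stands in for the "<uuid>-events" lock above

    def prepare(self, instance_uuid, name, tag):
        # The spawning path registers interest *before* asking Neutron to plug the VIF.
        event = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][(name, tag)] = event
        return event

    def dispatch(self, instance_uuid, name, tag):
        # The external-event path pops the matching waiter, if any.
        with self._lock:
            event = self._waiters[instance_uuid].pop((name, tag), None)
        if event is None:
            print(f"WARNING: unexpected event {name}-{tag} for instance {instance_uuid}")
        else:
            event.set()
```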
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.274132] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1220.274502] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1220.274858] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.275157] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1220.275448] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1220.276136] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-59692495-7b51-41b4-ae0c-1eccb15b733d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.284648] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1220.284970] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1220.285753] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c47d797-9417-45a5-9996-7fd3700bc55a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.291350] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for the task: (returnval){ [ 1220.291350] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5298ff81-645f-1d44-aa6c-9c494cf29693" [ 1220.291350] env[69992]: _type = "Task" [ 1220.291350] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.298601] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5298ff81-645f-1d44-aa6c-9c494cf29693, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.362027] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897598, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.614594] env[69992]: DEBUG nova.objects.base [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Object Instance<5f98a2aa-eb7b-41d2-9e9f-14cee9445942> lazy-loaded attributes: flavor,pci_requests {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1220.614776] env[69992]: DEBUG nova.network.neutron [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1220.667321] env[69992]: DEBUG oslo_vmware.api [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Task: {'id': task-2897600, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078885} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.668267] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1220.668267] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1220.668267] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1220.668267] env[69992]: INFO nova.compute.manager [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Took 2.25 seconds to destroy the instance on the hypervisor. [ 1220.668451] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
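For instance fc04a536-... the entries above complete the teardown in a fixed order: detach the Cinder-backed VMDK, unregister the VM, delete its directory from datastore2, then deallocate the Neutron ports. A hedged sketch of that sequence, reusing the illustrative session from the earlier sketch; vm_ref, ds_path and the callables are placeholders.

```python
def destroy_instance(session, vm_ref, file_manager, datacenter_ref, ds_path,
                     deallocate_network):
    # 1. UnregisterVM drops the VM from vCenter inventory but leaves its files behind.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
    # 2. Remove the instance directory (e.g. "[datastore2] fc04a536-.../") with a
    #    FileManager.DeleteDatastoreFile_Task and wait for it, as above.
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task', file_manager,
                              name=ds_path, datacenter=datacenter_ref)
    session.wait_for_task(task)
    # 3. Finally hand the instance's ports back to Neutron (deallocate_for_instance()).
    deallocate_network()
```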
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1220.668645] env[69992]: DEBUG nova.compute.manager [-] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1220.668740] env[69992]: DEBUG nova.network.neutron [-] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1220.693047] env[69992]: DEBUG nova.policy [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd0f7a6e9a76342a1a4fd39a8b21a31d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dc6fa4e45f4c47c49d67e6efe2eb7a50', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1220.736567] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dca84dae-f9ed-44d6-916d-6955b6b05898 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.744889] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85647452-4640-4e58-8035-910f2fe12912 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.779708] env[69992]: DEBUG nova.network.neutron [req-0d4da9fe-c93c-496b-898e-66f16c880210 req-9460f6b9-d445-43db-b755-8c16d8654bda service nova] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1220.781857] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-428e1f06-d9d0-4b0b-a89f-f0312aa64624 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.789261] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d132f84-6aa4-4879-a46a-447a655fabe1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.805140] env[69992]: DEBUG nova.compute.provider_tree [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1220.809984] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5298ff81-645f-1d44-aa6c-9c494cf29693, 'name': SearchDatastore_Task, 'duration_secs': 0.009063} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.810606] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fb02078-adbc-4049-b2fd-2e9d92fc4f20 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.816411] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for the task: (returnval){ [ 1220.816411] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52ad6d13-6ace-cc87-e32e-6fe312fff3eb" [ 1220.816411] env[69992]: _type = "Task" [ 1220.816411] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.824559] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ad6d13-6ace-cc87-e32e-6fe312fff3eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.862728] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897598, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.863713] env[69992]: DEBUG nova.network.neutron [req-0d4da9fe-c93c-496b-898e-66f16c880210 req-9460f6b9-d445-43db-b755-8c16d8654bda service nova] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1221.311014] env[69992]: DEBUG nova.scheduler.client.report [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1221.327767] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ad6d13-6ace-cc87-e32e-6fe312fff3eb, 'name': SearchDatastore_Task, 'duration_secs': 0.011413} completed successfully. 
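The image-cache entries above ("Processing image eb50549f-...", the lock on devstack-image-cache_base/...vmdk, SearchDatastore_Task, then CopyVirtualDisk_Task into the a9bf63c6-... instance directory) follow a fetch-if-missing pattern serialized on the cached image path. A sketch under that assumption; the helper callables are illustrative, only the lock usage is oslo.concurrency's real API.

```python
from oslo_concurrency import lockutils

CACHE = '[datastore1] devstack-image-cache_base'

def ensure_image_cached(session, image_id, vmdk_exists, fetch_from_glance):
    """vmdk_exists and fetch_from_glance are illustrative callables."""
    cached_vmdk = f'{CACHE}/{image_id}/{image_id}.vmdk'
    # Serialize on the cached path so concurrent spawns of the same image
    # do not race each other while populating the cache.
    with lockutils.lock(cached_vmdk):
        if not vmdk_exists(session, cached_vmdk):      # SearchDatastore_Task above
            fetch_from_glance(session, image_id, cached_vmdk)
    return cached_vmdk                                 # then CopyVirtualDisk_Task copies it
```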
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.328280] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1221.328570] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190/a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1221.328880] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-08eba2a9-7036-4730-8357-9aec130662a3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.335434] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for the task: (returnval){ [ 1221.335434] env[69992]: value = "task-2897601" [ 1221.335434] env[69992]: _type = "Task" [ 1221.335434] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.343645] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897601, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.361973] env[69992]: DEBUG oslo_vmware.api [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897598, 'name': PowerOnVM_Task, 'duration_secs': 1.092623} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.362832] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1221.362832] env[69992]: DEBUG nova.compute.manager [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1221.363620] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e45cefd-b375-4f00-85e5-cf529338fccc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.366252] env[69992]: DEBUG oslo_concurrency.lockutils [req-0d4da9fe-c93c-496b-898e-66f16c880210 req-9460f6b9-d445-43db-b755-8c16d8654bda service nova] Releasing lock "refresh_cache-c4bd5585-d917-4d92-9ce8-fa1950944f25" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1221.366590] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired lock "refresh_cache-c4bd5585-d917-4d92-9ce8-fa1950944f25" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1221.366748] env[69992]: DEBUG nova.network.neutron [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1221.492304] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.492640] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.796744] env[69992]: DEBUG nova.network.neutron [-] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1221.816704] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.431s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1221.817613] env[69992]: DEBUG nova.compute.manager [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1221.821448] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.823595] env[69992]: DEBUG nova.objects.instance [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lazy-loading 'resources' on Instance uuid 408de352-797c-40c2-86bc-359e01c5c04e {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1221.848430] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897601, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.882773] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.904547] env[69992]: DEBUG nova.network.neutron [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1221.994877] env[69992]: DEBUG nova.compute.manager [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1222.049690] env[69992]: DEBUG nova.network.neutron [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Updating instance_info_cache with network_info: [{"id": "445cdcf8-38ea-4465-a568-4f4e63c483dd", "address": "fa:16:3e:3d:53:32", "network": {"id": "ef950ccf-7246-4fba-b1d2-5829e51d7f7d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093076994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b8716c4b7324052a3472734c655655a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap445cdcf8-38", "ovs_interfaceid": "445cdcf8-38ea-4465-a568-4f4e63c483dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1222.300018] env[69992]: INFO nova.compute.manager [-] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Took 1.63 seconds to deallocate network for instance. [ 1222.325363] env[69992]: DEBUG nova.compute.utils [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1222.333493] env[69992]: DEBUG nova.compute.manager [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1222.333493] env[69992]: DEBUG nova.network.neutron [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1222.349995] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897601, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.548034} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.350435] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190/a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1222.350860] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1222.351275] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-58662f38-5e1d-46ea-9140-d57a20ef6aaf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.357890] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for the task: (returnval){ [ 1222.357890] env[69992]: value = "task-2897602" [ 1222.357890] env[69992]: _type = "Task" [ 1222.357890] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.368177] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897602, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.398473] env[69992]: DEBUG nova.policy [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8589a47b616643f5a513f62354529eda', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57eaf44c4ac5491380b329e1e86e9454', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1222.402561] env[69992]: DEBUG nova.compute.manager [req-96910ac1-e786-445f-bea9-d188c5b1aaed req-2a6d4b49-e7c6-4a37-8c71-d563c5e7851c service nova] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Received event network-vif-deleted-bf5a36db-df8e-4dd4-9248-fdb5f256bc7b {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1222.467149] env[69992]: DEBUG nova.network.neutron [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Successfully updated port: 82ef9ca1-4fe1-48ff-bf80-ca693a1f7662 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1222.518545] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1222.553501] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Releasing lock "refresh_cache-c4bd5585-d917-4d92-9ce8-fa1950944f25" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1222.553809] env[69992]: DEBUG nova.compute.manager [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Instance network_info: |[{"id": "445cdcf8-38ea-4465-a568-4f4e63c483dd", "address": "fa:16:3e:3d:53:32", "network": {"id": "ef950ccf-7246-4fba-b1d2-5829e51d7f7d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093076994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b8716c4b7324052a3472734c655655a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap445cdcf8-38", "ovs_interfaceid": "445cdcf8-38ea-4465-a568-4f4e63c483dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1222.555179] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:53:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ed4797-90ad-44cd-bbcb-e90b2a8400f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '445cdcf8-38ea-4465-a568-4f4e63c483dd', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1222.562444] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1222.562711] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1222.562872] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-69c9e765-26a4-42bf-bea1-c0d4839cbd56 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.586018] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1222.586018] env[69992]: value = "task-2897603" [ 1222.586018] env[69992]: _type = "Task" [ 1222.586018] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.598778] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897603, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.665091] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1222.665305] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1222.720680] env[69992]: DEBUG nova.network.neutron [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Successfully created port: 9e523523-ecdf-4308-88c7-6336fdea2bee {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1222.795119] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e14918-c6a9-4196-aea7-4369f8a10ba8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.803277] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-268f1645-bdc9-4bcf-9ebe-93df213a140a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.835609] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4a0809-70e0-488a-ad27-dfa520d9c52b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.838622] env[69992]: DEBUG nova.compute.manager [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1222.846538] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e94ddf-8c01-4895-8433-b983c247b270 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.861105] env[69992]: DEBUG nova.compute.provider_tree [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1222.863592] env[69992]: INFO nova.compute.manager [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Took 0.56 seconds to detach 1 volumes for instance. 
[ 1222.868270] env[69992]: DEBUG nova.compute.manager [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Deleting volume: a203e79e-9126-47e8-96d7-9c0a57c68179 {{(pid=69992) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1222.877450] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897602, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066085} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.877701] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1222.879331] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af0f3e33-7700-4d2a-9163-fc9a2ccfb6b6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.902070] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190/a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1222.902070] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-818cd39e-0a22-4405-b89c-cc9a4ebffbc4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.931788] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for the task: (returnval){ [ 1222.931788] env[69992]: value = "task-2897604" [ 1222.931788] env[69992]: _type = "Task" [ 1222.931788] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.942540] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897604, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.975511] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1222.975622] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1222.975867] env[69992]: DEBUG nova.network.neutron [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1223.095997] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897603, 'name': CreateVM_Task, 'duration_secs': 0.358292} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.095997] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1223.096567] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1223.096712] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1223.097026] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1223.097285] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69be711b-a2c7-4657-8797-02b619b9785b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.102119] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1223.102119] env[69992]: value = 
"session[528eb7b7-6862-86e5-2686-6146916c3c70]526546c9-b65b-a190-c303-0b4ac13cbb36" [ 1223.102119] env[69992]: _type = "Task" [ 1223.102119] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.110251] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526546c9-b65b-a190-c303-0b4ac13cbb36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.175443] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1223.175651] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1223.175808] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1223.175962] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1223.176123] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1223.176271] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1223.176562] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69992) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1223.176562] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1223.371431] env[69992]: DEBUG nova.scheduler.client.report [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1223.424989] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1223.442896] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897604, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.517666] env[69992]: WARNING nova.network.neutron [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] 7c8e9b14-bcc2-45f2-8b37-5f478b75057e already exists in list: networks containing: ['7c8e9b14-bcc2-45f2-8b37-5f478b75057e']. ignoring it [ 1223.517807] env[69992]: WARNING nova.network.neutron [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] 7c8e9b14-bcc2-45f2-8b37-5f478b75057e already exists in list: networks containing: ['7c8e9b14-bcc2-45f2-8b37-5f478b75057e']. 
ignoring it [ 1223.567201] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Acquiring lock "fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1223.567490] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Lock "fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1223.567712] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Acquiring lock "fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1223.567919] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Lock "fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1223.568112] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Lock "fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1223.570856] env[69992]: INFO nova.compute.manager [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Terminating instance [ 1223.614439] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526546c9-b65b-a190-c303-0b4ac13cbb36, 'name': SearchDatastore_Task, 'duration_secs': 0.014842} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.614627] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1223.614754] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1223.614972] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1223.615139] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1223.615324] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1223.615595] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-33018585-207e-4750-8145-4af2a8ca6068 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.628464] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1223.628464] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1223.629352] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b723ba6-ef31-44d4-8d6a-47d5df2a587d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.634785] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1223.634785] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52097dec-1af0-44e8-2437-89490d76fd97" [ 1223.634785] env[69992]: _type = "Task" [ 1223.634785] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.644602] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52097dec-1af0-44e8-2437-89490d76fd97, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.679423] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1223.727326] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Acquiring lock "451a8af1-a4a2-4c2d-932c-58955491433b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1223.727618] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Lock "451a8af1-a4a2-4c2d-932c-58955491433b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1223.727859] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Acquiring lock "451a8af1-a4a2-4c2d-932c-58955491433b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1223.728107] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Lock "451a8af1-a4a2-4c2d-932c-58955491433b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1223.728304] env[69992]: DEBUG 
oslo_concurrency.lockutils [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Lock "451a8af1-a4a2-4c2d-932c-58955491433b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1223.731484] env[69992]: INFO nova.compute.manager [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Terminating instance [ 1223.850281] env[69992]: DEBUG nova.compute.manager [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1223.877612] env[69992]: DEBUG nova.virt.hardware [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1223.877916] env[69992]: DEBUG nova.virt.hardware [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1223.878087] env[69992]: DEBUG nova.virt.hardware [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1223.878462] env[69992]: DEBUG nova.virt.hardware [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1223.878655] env[69992]: DEBUG nova.virt.hardware [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1223.878810] env[69992]: DEBUG nova.virt.hardware [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1223.879031] env[69992]: DEBUG nova.virt.hardware [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1223.879464] env[69992]: DEBUG nova.virt.hardware [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1223.880079] env[69992]: DEBUG nova.virt.hardware [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1223.880079] env[69992]: DEBUG nova.virt.hardware [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1223.880079] env[69992]: DEBUG nova.virt.hardware [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1223.880803] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.059s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1223.883894] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-336f6398-e155-409e-9dea-083a34170d77 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.889292] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.092s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1223.891066] env[69992]: INFO nova.compute.claims [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1223.899606] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-658c7dd3-99a9-4ee8-90ea-bc70c2219d95 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.914510] env[69992]: INFO 
nova.scheduler.client.report [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Deleted allocations for instance 408de352-797c-40c2-86bc-359e01c5c04e [ 1223.943330] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897604, 'name': ReconfigVM_Task, 'duration_secs': 0.688668} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.943813] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Reconfigured VM instance instance-0000004c to attach disk [datastore1] a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190/a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1223.944357] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd2ff53b-fb3f-4987-a752-06769c6f1257 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.951592] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for the task: (returnval){ [ 1223.951592] env[69992]: value = "task-2897606" [ 1223.951592] env[69992]: _type = "Task" [ 1223.951592] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.962773] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897606, 'name': Rename_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.991931] env[69992]: DEBUG nova.network.neutron [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Updating instance_info_cache with network_info: [{"id": "48ef557e-b0bc-4415-84c9-60b9146b4ff7", "address": "fa:16:3e:6e:ee:46", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48ef557e-b0", "ovs_interfaceid": "48ef557e-b0bc-4415-84c9-60b9146b4ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4ead8f7e-9ac9-474b-9302-a618d1bf1988", "address": "fa:16:3e:43:bd:3a", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ead8f7e-9a", "ovs_interfaceid": "4ead8f7e-9ac9-474b-9302-a618d1bf1988", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "82ef9ca1-4fe1-48ff-bf80-ca693a1f7662", "address": "fa:16:3e:da:5b:2f", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": 
"ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82ef9ca1-4f", "ovs_interfaceid": "82ef9ca1-4fe1-48ff-bf80-ca693a1f7662", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.075561] env[69992]: DEBUG nova.compute.manager [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1224.075794] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1224.076696] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b653156e-c96a-4bfe-93cb-bbfb1a701e5e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.084497] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1224.084748] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-837e2d20-20f6-4446-85c5-fe806d9ada77 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.091703] env[69992]: DEBUG oslo_vmware.api [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Waiting for the task: (returnval){ [ 1224.091703] env[69992]: value = "task-2897607" [ 1224.091703] env[69992]: _type = "Task" [ 1224.091703] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.100126] env[69992]: DEBUG oslo_vmware.api [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Task: {'id': task-2897607, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.152209] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52097dec-1af0-44e8-2437-89490d76fd97, 'name': SearchDatastore_Task, 'duration_secs': 0.013688} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.152209] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e8b2b5b-1f12-499d-95ff-9c4642c7ffe6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.158764] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1224.158764] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]522aa012-fbc7-db99-7d2c-6d6c170b58b9" [ 1224.158764] env[69992]: _type = "Task" [ 1224.158764] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.171015] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]522aa012-fbc7-db99-7d2c-6d6c170b58b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.235437] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Acquiring lock "refresh_cache-451a8af1-a4a2-4c2d-932c-58955491433b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1224.235834] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Acquired lock "refresh_cache-451a8af1-a4a2-4c2d-932c-58955491433b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1224.236148] env[69992]: DEBUG nova.network.neutron [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1224.269769] env[69992]: DEBUG nova.network.neutron [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Successfully updated port: 9e523523-ecdf-4308-88c7-6336fdea2bee {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1224.422335] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e17728fa-836b-49f0-bfa5-dfad5db1e365 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "408de352-797c-40c2-86bc-359e01c5c04e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.158s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1224.463691] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897606, 'name': Rename_Task, 
'duration_secs': 0.252512} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.463964] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1224.464224] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de22973f-c307-4c21-aaa0-0cbe2e22d45c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.470560] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for the task: (returnval){ [ 1224.470560] env[69992]: value = "task-2897608" [ 1224.470560] env[69992]: _type = "Task" [ 1224.470560] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.480157] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897608, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.494858] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1224.495789] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1224.495789] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1224.496528] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b47fe38b-ba98-431c-a7b8-bf00ee942d13 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.514556] env[69992]: DEBUG nova.virt.hardware [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and 
image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1224.514807] env[69992]: DEBUG nova.virt.hardware [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1224.514964] env[69992]: DEBUG nova.virt.hardware [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1224.515189] env[69992]: DEBUG nova.virt.hardware [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1224.515361] env[69992]: DEBUG nova.virt.hardware [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1224.516026] env[69992]: DEBUG nova.virt.hardware [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1224.516026] env[69992]: DEBUG nova.virt.hardware [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1224.516026] env[69992]: DEBUG nova.virt.hardware [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1224.516026] env[69992]: DEBUG nova.virt.hardware [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1224.516226] env[69992]: DEBUG nova.virt.hardware [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1224.516349] env[69992]: DEBUG nova.virt.hardware [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 
tempest-AttachInterfacesTestJSON-1431277975-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1224.522800] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Reconfiguring VM to attach interface {{(pid=69992) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1224.523248] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ae8cbfe-2cad-4c87-8454-dd627e81a19f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.541031] env[69992]: DEBUG oslo_vmware.api [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1224.541031] env[69992]: value = "task-2897609" [ 1224.541031] env[69992]: _type = "Task" [ 1224.541031] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.548673] env[69992]: DEBUG oslo_vmware.api [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897609, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.600985] env[69992]: DEBUG oslo_vmware.api [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Task: {'id': task-2897607, 'name': PowerOffVM_Task, 'duration_secs': 0.26344} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.601310] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1224.601484] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1224.601737] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e878dd0-924d-4907-a34c-6dab08b92f4e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.664803] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1224.665028] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1224.665220] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Deleting the datastore file [datastore2] fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1224.665829] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-64da008c-5ad4-4cdd-8a19-8ed1cea02790 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.671266] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]522aa012-fbc7-db99-7d2c-6d6c170b58b9, 'name': SearchDatastore_Task, 'duration_secs': 0.012909} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.672827] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1224.672827] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] c4bd5585-d917-4d92-9ce8-fa1950944f25/c4bd5585-d917-4d92-9ce8-fa1950944f25.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1224.673603] env[69992]: DEBUG oslo_vmware.api [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Waiting for the task: (returnval){ [ 1224.673603] env[69992]: value = "task-2897611" [ 1224.673603] env[69992]: _type = "Task" [ 1224.673603] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.673603] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e333559-9939-43da-9a14-73c2ce303510 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.682762] env[69992]: DEBUG oslo_vmware.api [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Task: {'id': task-2897611, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.685839] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1224.685839] env[69992]: value = "task-2897612" [ 1224.685839] env[69992]: _type = "Task" [ 1224.685839] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.691687] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897612, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.758303] env[69992]: DEBUG nova.network.neutron [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1224.772346] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "refresh_cache-953c0e0d-3279-444c-b631-6ebbf24e5487" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1224.772554] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired lock "refresh_cache-953c0e0d-3279-444c-b631-6ebbf24e5487" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1224.772714] env[69992]: DEBUG nova.network.neutron [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1224.796190] env[69992]: DEBUG nova.compute.manager [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Received event network-vif-plugged-82ef9ca1-4fe1-48ff-bf80-ca693a1f7662 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1224.796365] env[69992]: DEBUG oslo_concurrency.lockutils [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] Acquiring lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1224.796574] env[69992]: DEBUG oslo_concurrency.lockutils [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] Lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1224.796728] env[69992]: DEBUG oslo_concurrency.lockutils [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] Lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1224.796949] env[69992]: DEBUG nova.compute.manager [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] No waiting events found dispatching network-vif-plugged-82ef9ca1-4fe1-48ff-bf80-ca693a1f7662 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1224.797489] env[69992]: WARNING nova.compute.manager [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Received unexpected event network-vif-plugged-82ef9ca1-4fe1-48ff-bf80-ca693a1f7662 for instance with vm_state active and task_state None. 
[ 1224.797733] env[69992]: DEBUG nova.compute.manager [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Received event network-changed-82ef9ca1-4fe1-48ff-bf80-ca693a1f7662 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1224.797902] env[69992]: DEBUG nova.compute.manager [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Refreshing instance network info cache due to event network-changed-82ef9ca1-4fe1-48ff-bf80-ca693a1f7662. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1224.798137] env[69992]: DEBUG oslo_concurrency.lockutils [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] Acquiring lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1224.798284] env[69992]: DEBUG oslo_concurrency.lockutils [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] Acquired lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1224.798444] env[69992]: DEBUG nova.network.neutron [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Refreshing network info cache for port 82ef9ca1-4fe1-48ff-bf80-ca693a1f7662 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1224.865602] env[69992]: DEBUG nova.network.neutron [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.991166] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897608, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.054249] env[69992]: DEBUG oslo_vmware.api [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897609, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.190650] env[69992]: DEBUG oslo_vmware.api [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Task: {'id': task-2897611, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.236389} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.194131] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1225.194329] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1225.194515] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1225.194696] env[69992]: INFO nova.compute.manager [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1225.194953] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1225.197900] env[69992]: DEBUG nova.compute.manager [-] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1225.198039] env[69992]: DEBUG nova.network.neutron [-] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1225.205145] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897612, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.367836] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1894de0f-f65b-4c4f-8279-4a620d93cace {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.373157] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Releasing lock "refresh_cache-451a8af1-a4a2-4c2d-932c-58955491433b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1225.373573] env[69992]: DEBUG nova.compute.manager [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1225.373762] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1225.374609] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adac939a-9c1d-49a4-81fe-0749db9c1a52 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.384035] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e4c0ab-f3df-4a73-9ed3-ef7adc441e2b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.387294] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1225.387536] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-44380c6c-2fb8-47c2-8caf-8495bfddfbb0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.393452] env[69992]: DEBUG oslo_vmware.api [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Waiting for the task: (returnval){ [ 1225.393452] env[69992]: value = "task-2897613" [ 1225.393452] env[69992]: _type = "Task" [ 1225.393452] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.426274] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76fcd731-58b2-4e4e-b1d5-d45e2a8b5526 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.437314] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d0e704-ec59-42e0-966d-c2b22294fbd4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.441475] env[69992]: DEBUG oslo_vmware.api [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897613, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.452713] env[69992]: DEBUG nova.compute.provider_tree [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1225.484200] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897608, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.511158] env[69992]: DEBUG nova.network.neutron [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1225.555556] env[69992]: DEBUG oslo_vmware.api [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897609, 'name': ReconfigVM_Task, 'duration_secs': 0.607809} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.555556] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1225.557639] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Reconfigured VM to attach interface {{(pid=69992) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1225.614750] env[69992]: DEBUG nova.network.neutron [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Updated VIF entry in instance network info cache for port 82ef9ca1-4fe1-48ff-bf80-ca693a1f7662. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1225.615228] env[69992]: DEBUG nova.network.neutron [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Updating instance_info_cache with network_info: [{"id": "48ef557e-b0bc-4415-84c9-60b9146b4ff7", "address": "fa:16:3e:6e:ee:46", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48ef557e-b0", "ovs_interfaceid": "48ef557e-b0bc-4415-84c9-60b9146b4ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4ead8f7e-9ac9-474b-9302-a618d1bf1988", "address": "fa:16:3e:43:bd:3a", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ead8f7e-9a", "ovs_interfaceid": "4ead8f7e-9ac9-474b-9302-a618d1bf1988", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "82ef9ca1-4fe1-48ff-bf80-ca693a1f7662", "address": "fa:16:3e:da:5b:2f", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82ef9ca1-4f", "ovs_interfaceid": "82ef9ca1-4fe1-48ff-bf80-ca693a1f7662", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1225.701821] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897612, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.666615} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.702101] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] c4bd5585-d917-4d92-9ce8-fa1950944f25/c4bd5585-d917-4d92-9ce8-fa1950944f25.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1225.702323] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1225.702658] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5977ad39-67d7-479e-8622-174091735b78 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.709373] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1225.709373] env[69992]: value = "task-2897614" [ 1225.709373] env[69992]: _type = "Task" [ 1225.709373] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.717770] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897614, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.843940] env[69992]: DEBUG nova.network.neutron [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Updating instance_info_cache with network_info: [{"id": "9e523523-ecdf-4308-88c7-6336fdea2bee", "address": "fa:16:3e:3e:a1:f2", "network": {"id": "8276eb4a-aa4d-463a-beae-6aab3fe1a5ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-494735806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57eaf44c4ac5491380b329e1e86e9454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e523523-ec", "ovs_interfaceid": "9e523523-ecdf-4308-88c7-6336fdea2bee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1225.931638] env[69992]: DEBUG oslo_vmware.api [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897613, 'name': PowerOffVM_Task, 'duration_secs': 0.397171} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.932444] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1225.936166] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1225.936491] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-95d48df0-9633-4fd7-93bc-9a27e835476a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.956595] env[69992]: DEBUG nova.scheduler.client.report [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1225.961459] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1225.961669] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1225.961922] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Deleting the datastore file [datastore2] 451a8af1-a4a2-4c2d-932c-58955491433b {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1225.962208] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-018b3de7-0ca9-438a-a32f-3166fbdd388e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.969805] env[69992]: DEBUG oslo_vmware.api [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Waiting for the task: (returnval){ [ 1225.969805] env[69992]: value = "task-2897616" [ 1225.969805] env[69992]: _type = "Task" [ 1225.969805] 
env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.979869] env[69992]: DEBUG oslo_vmware.api [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897616, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.986098] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897608, 'name': PowerOnVM_Task} progress is 87%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.065951] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5510a6ff-7651-4311-95cb-fc5a59a45e69 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "interface-5f98a2aa-eb7b-41d2-9e9f-14cee9445942-82ef9ca1-4fe1-48ff-bf80-ca693a1f7662" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.546s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.118491] env[69992]: DEBUG oslo_concurrency.lockutils [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] Releasing lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1226.118762] env[69992]: DEBUG nova.compute.manager [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Received event network-vif-plugged-9e523523-ecdf-4308-88c7-6336fdea2bee {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1226.119046] env[69992]: DEBUG oslo_concurrency.lockutils [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] Acquiring lock "953c0e0d-3279-444c-b631-6ebbf24e5487-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.119321] env[69992]: DEBUG oslo_concurrency.lockutils [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] Lock "953c0e0d-3279-444c-b631-6ebbf24e5487-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.119526] env[69992]: DEBUG oslo_concurrency.lockutils [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] Lock "953c0e0d-3279-444c-b631-6ebbf24e5487-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.120519] env[69992]: DEBUG nova.compute.manager [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] No waiting events found dispatching 
network-vif-plugged-9e523523-ecdf-4308-88c7-6336fdea2bee {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1226.120519] env[69992]: WARNING nova.compute.manager [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Received unexpected event network-vif-plugged-9e523523-ecdf-4308-88c7-6336fdea2bee for instance with vm_state building and task_state spawning. [ 1226.120519] env[69992]: DEBUG nova.compute.manager [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Received event network-changed-9e523523-ecdf-4308-88c7-6336fdea2bee {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1226.120519] env[69992]: DEBUG nova.compute.manager [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Refreshing instance network info cache due to event network-changed-9e523523-ecdf-4308-88c7-6336fdea2bee. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1226.120519] env[69992]: DEBUG oslo_concurrency.lockutils [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] Acquiring lock "refresh_cache-953c0e0d-3279-444c-b631-6ebbf24e5487" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1226.218698] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897614, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073203} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.218967] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1226.219793] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5b95041-f75f-41eb-b0e4-e16df3a7b1ed {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.246233] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] c4bd5585-d917-4d92-9ce8-fa1950944f25/c4bd5585-d917-4d92-9ce8-fa1950944f25.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1226.246233] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f5360e6-1f62-4d64-a931-c520b00e85b4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.266151] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1226.266151] env[69992]: value = "task-2897617" [ 1226.266151] env[69992]: _type = "Task" [ 1226.266151] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.277091] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897617, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.329717] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "546fb923-4574-4407-8625-69e6c4d8d35e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.329823] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "546fb923-4574-4407-8625-69e6c4d8d35e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.330074] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "546fb923-4574-4407-8625-69e6c4d8d35e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.330316] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "546fb923-4574-4407-8625-69e6c4d8d35e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.334017] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "546fb923-4574-4407-8625-69e6c4d8d35e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.334017] env[69992]: INFO nova.compute.manager [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Terminating instance [ 1226.346443] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Releasing lock "refresh_cache-953c0e0d-3279-444c-b631-6ebbf24e5487" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1226.346582] env[69992]: DEBUG nova.compute.manager [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Instance network_info: |[{"id": "9e523523-ecdf-4308-88c7-6336fdea2bee", "address": "fa:16:3e:3e:a1:f2", "network": {"id": "8276eb4a-aa4d-463a-beae-6aab3fe1a5ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-494735806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, 
"meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57eaf44c4ac5491380b329e1e86e9454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e523523-ec", "ovs_interfaceid": "9e523523-ecdf-4308-88c7-6336fdea2bee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1226.347305] env[69992]: DEBUG oslo_concurrency.lockutils [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] Acquired lock "refresh_cache-953c0e0d-3279-444c-b631-6ebbf24e5487" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1226.347540] env[69992]: DEBUG nova.network.neutron [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Refreshing network info cache for port 9e523523-ecdf-4308-88c7-6336fdea2bee {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1226.349560] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:a1:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73b1ea51-8078-4169-921e-d5a224120ab4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e523523-ecdf-4308-88c7-6336fdea2bee', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1226.358288] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1226.358480] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1226.358725] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6c6e4ae1-aa4e-4f07-81e2-5b89156ef404 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.380565] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1226.380565] env[69992]: value = "task-2897618" [ 1226.380565] env[69992]: _type = "Task" [ 1226.380565] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.391030] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897618, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.468083] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.579s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.468717] env[69992]: DEBUG nova.compute.manager [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1226.475070] env[69992]: DEBUG oslo_concurrency.lockutils [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.120s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.475070] env[69992]: DEBUG nova.objects.instance [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lazy-loading 'resources' on Instance uuid dd31269e-716c-44cd-9fc3-ce227fe5b3b2 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1226.487895] env[69992]: DEBUG oslo_vmware.api [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Task: {'id': task-2897616, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.22419} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.491224] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1226.491433] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1226.491612] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1226.491779] env[69992]: INFO nova.compute.manager [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1226.492032] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1226.492247] env[69992]: DEBUG oslo_vmware.api [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897608, 'name': PowerOnVM_Task, 'duration_secs': 1.739999} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.492719] env[69992]: DEBUG nova.compute.manager [-] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1226.492882] env[69992]: DEBUG nova.network.neutron [-] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1226.494570] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1226.494777] env[69992]: DEBUG nova.compute.manager [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1226.495614] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dff08ca-45c0-4a11-b64a-b6b24be12594 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.523669] env[69992]: DEBUG nova.network.neutron [-] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1226.780251] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897617, 'name': ReconfigVM_Task, 'duration_secs': 0.355387} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.780701] env[69992]: DEBUG nova.network.neutron [-] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1226.781950] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Reconfigured VM instance instance-0000004e to attach disk [datastore1] c4bd5585-d917-4d92-9ce8-fa1950944f25/c4bd5585-d917-4d92-9ce8-fa1950944f25.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1226.782258] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9977d19d-d1e4-45f7-86ef-b56a46d88e06 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.788955] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1226.788955] env[69992]: value = "task-2897619" [ 1226.788955] env[69992]: _type = "Task" [ 1226.788955] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.796688] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897619, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.836815] env[69992]: DEBUG nova.compute.manager [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1226.837140] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1226.838364] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd04e911-4abe-4bbf-9415-e21a35af2de1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.846026] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1226.846274] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82fe6537-d92e-43ff-8733-6c5ed5b9f623 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.858777] env[69992]: DEBUG oslo_vmware.api [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1226.858777] env[69992]: value = "task-2897620" [ 1226.858777] env[69992]: _type = "Task" [ 1226.858777] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.872017] env[69992]: DEBUG oslo_vmware.api [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897620, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.894505] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897618, 'name': CreateVM_Task, 'duration_secs': 0.374461} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.895396] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1226.895561] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1226.895793] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1226.896159] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1226.896477] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d7fcd2c-43cd-4dbe-8c20-1f3c9438b848 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.903469] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1226.903469] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52c6d9d5-32dd-1ac8-d903-f35f71f9b8cd" [ 1226.903469] env[69992]: _type = "Task" [ 1226.903469] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.915180] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c6d9d5-32dd-1ac8-d903-f35f71f9b8cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.974690] env[69992]: DEBUG nova.compute.utils [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1226.980958] env[69992]: DEBUG nova.compute.manager [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1226.980958] env[69992]: DEBUG nova.network.neutron [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1227.019274] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1227.032922] env[69992]: DEBUG nova.network.neutron [-] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1227.062554] env[69992]: DEBUG nova.policy [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c21797a1c257494fada96ef2bc436e53', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8561522a702a48b9ae92d6c4c5de095c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1227.153376] env[69992]: DEBUG nova.network.neutron [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Updated VIF entry in instance network info cache for port 9e523523-ecdf-4308-88c7-6336fdea2bee. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1227.154138] env[69992]: DEBUG nova.network.neutron [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Updating instance_info_cache with network_info: [{"id": "9e523523-ecdf-4308-88c7-6336fdea2bee", "address": "fa:16:3e:3e:a1:f2", "network": {"id": "8276eb4a-aa4d-463a-beae-6aab3fe1a5ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-494735806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57eaf44c4ac5491380b329e1e86e9454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e523523-ec", "ovs_interfaceid": "9e523523-ecdf-4308-88c7-6336fdea2bee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1227.285438] env[69992]: INFO nova.compute.manager [-] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Took 2.09 seconds to deallocate network for instance. [ 1227.303469] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897619, 'name': Rename_Task, 'duration_secs': 0.163209} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.303746] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1227.303988] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed9a062a-6aff-4211-9936-f98b794d089d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.313143] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1227.313143] env[69992]: value = "task-2897621" [ 1227.313143] env[69992]: _type = "Task" [ 1227.313143] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.321730] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897621, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.376316] env[69992]: DEBUG oslo_vmware.api [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897620, 'name': PowerOffVM_Task, 'duration_secs': 0.199677} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.376555] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1227.376765] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1227.377056] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fbb61e35-bca7-491b-9fce-9226d33555fc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.396603] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "c6e4f19b-7264-4eea-a472-f64a68d4df22" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1227.396788] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "c6e4f19b-7264-4eea-a472-f64a68d4df22" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1227.423748] env[69992]: DEBUG nova.compute.manager [req-c1e3b5bf-8c66-4efd-9baf-2698b0f912e2 req-5cff6d6f-5c1c-42c8-b690-16a5880d3629 service nova] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Received event network-vif-deleted-c01a5abb-0c56-4377-ab40-619062fc6092 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1227.424502] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c6d9d5-32dd-1ac8-d903-f35f71f9b8cd, 'name': SearchDatastore_Task, 'duration_secs': 0.012428} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.428095] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1227.428736] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1227.429029] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.429187] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1227.429458] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1227.430437] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec111db5-b6d0-449d-b774-5406c6ce4dca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.442030] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1227.442030] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1227.443056] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e989cf55-5563-4b3c-a2d3-8cc155881af2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.450853] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1227.450853] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1227.450853] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Deleting the datastore file [datastore1] 546fb923-4574-4407-8625-69e6c4d8d35e {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1227.450853] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-24f56fd4-c271-477e-a86e-895b288d39fe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.454845] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1227.454845] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52bfdb45-24ea-eaf6-7510-d1decd408daa" [ 1227.454845] env[69992]: _type = "Task" [ 1227.454845] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.459715] env[69992]: DEBUG oslo_vmware.api [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1227.459715] env[69992]: value = "task-2897623" [ 1227.459715] env[69992]: _type = "Task" [ 1227.459715] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.467473] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52bfdb45-24ea-eaf6-7510-d1decd408daa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.475599] env[69992]: DEBUG oslo_vmware.api [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897623, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.482341] env[69992]: DEBUG nova.compute.manager [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1227.520527] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f72f62-baa5-4796-8a9a-3300b0f64ae3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.527487] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32de4af1-b321-4d83-8419-b0d76b0f0acf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.315543] env[69992]: INFO nova.compute.manager [-] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Took 1.82 seconds to deallocate network for instance. [ 1228.317744] env[69992]: DEBUG oslo_concurrency.lockutils [req-4c26ffac-760f-49b4-a86f-d3aa1be05bbd req-48745e2c-f3ba-4089-9a87-1ff13b78785d service nova] Releasing lock "refresh_cache-953c0e0d-3279-444c-b631-6ebbf24e5487" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1228.318696] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1228.319107] env[69992]: DEBUG nova.compute.manager [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1228.325276] env[69992]: DEBUG nova.network.neutron [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Successfully created port: 24614f86-0f65-4b7b-b425-05b92f02312b {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1228.341438] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d65f11b2-7ed0-4ab9-91ca-6591677986ca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.355912] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52bfdb45-24ea-eaf6-7510-d1decd408daa, 'name': SearchDatastore_Task, 'duration_secs': 0.011079} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.356050] env[69992]: DEBUG oslo_vmware.api [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897621, 'name': PowerOnVM_Task, 'duration_secs': 0.664748} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.361284] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1228.361284] env[69992]: INFO nova.compute.manager [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Took 8.98 seconds to spawn the instance on the hypervisor. [ 1228.361452] env[69992]: DEBUG nova.compute.manager [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1228.361771] env[69992]: DEBUG oslo_vmware.api [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897623, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.271093} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.362561] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08aba4b7-ec14-455a-a8de-a2846843443e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.365283] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c6bcdb-163a-4ea3-b7a8-f3de5fe7517c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.368842] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17e39e9a-d380-46a9-bbd1-7dcee10f1143 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.372464] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1228.372547] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1228.372732] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1228.372910] env[69992]: INFO nova.compute.manager [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Took 1.54 seconds to destroy the instance on the hypervisor. [ 1228.373160] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1228.374488] env[69992]: DEBUG nova.compute.manager [-] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1228.374590] env[69992]: DEBUG nova.network.neutron [-] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1228.379971] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1228.379971] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52e84ef2-0469-10be-aafa-affa73c645ef" [ 1228.379971] env[69992]: _type = "Task" [ 1228.379971] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.390474] env[69992]: DEBUG nova.compute.provider_tree [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1228.403123] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e84ef2-0469-10be-aafa-affa73c645ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.847469] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1228.847954] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "interface-5f98a2aa-eb7b-41d2-9e9f-14cee9445942-4ead8f7e-9ac9-474b-9302-a618d1bf1988" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1228.848234] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "interface-5f98a2aa-eb7b-41d2-9e9f-14cee9445942-4ead8f7e-9ac9-474b-9302-a618d1bf1988" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1228.852103] env[69992]: DEBUG nova.compute.manager [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1228.854144] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Acquiring lock "a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1228.854411] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Lock "a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1228.854665] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Acquiring lock "a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1228.855625] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Lock "a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1228.855625] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Lock "a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1228.859273] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1228.860317] env[69992]: INFO nova.compute.manager [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Terminating instance [ 1228.886734] env[69992]: DEBUG nova.virt.hardware [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1228.887110] env[69992]: DEBUG nova.virt.hardware [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1228.887373] env[69992]: DEBUG nova.virt.hardware [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1228.887373] env[69992]: DEBUG nova.virt.hardware [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1228.887512] env[69992]: DEBUG nova.virt.hardware [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1228.887657] env[69992]: DEBUG nova.virt.hardware [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1228.887907] env[69992]: DEBUG nova.virt.hardware [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1228.888127] env[69992]: DEBUG nova.virt.hardware [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1228.888349] env[69992]: DEBUG nova.virt.hardware [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1228.888588] env[69992]: DEBUG nova.virt.hardware [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f 
tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1228.888699] env[69992]: DEBUG nova.virt.hardware [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1228.890105] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-449790f3-e72c-4f2a-95c3-24869515e0f4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.897011] env[69992]: DEBUG nova.scheduler.client.report [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1228.910355] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa4b8c1-f0ac-4d1f-b0c2-66504ac406ec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.918641] env[69992]: INFO nova.compute.manager [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Took 42.59 seconds to build instance. [ 1228.919710] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e84ef2-0469-10be-aafa-affa73c645ef, 'name': SearchDatastore_Task, 'duration_secs': 0.025599} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.920795] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1228.921116] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 953c0e0d-3279-444c-b631-6ebbf24e5487/953c0e0d-3279-444c-b631-6ebbf24e5487.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1228.922092] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-79b3ed33-78e2-4342-959b-3afcc04718fe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.937424] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1228.937424] env[69992]: value = "task-2897624" [ 1228.937424] env[69992]: _type = "Task" [ 1228.937424] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.947121] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897624, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.242540] env[69992]: DEBUG oslo_concurrency.lockutils [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "e95e47c2-d82e-4153-8d16-7b65d992e91a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1229.242819] env[69992]: DEBUG oslo_concurrency.lockutils [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "e95e47c2-d82e-4153-8d16-7b65d992e91a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1229.283367] env[69992]: DEBUG nova.network.neutron [-] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1229.361458] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1229.361800] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1229.363713] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee53e1f-7af6-4411-a819-64aba4a75630 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.367814] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Acquiring lock "refresh_cache-a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1229.368163] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Acquired lock "refresh_cache-a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1229.368267] env[69992]: DEBUG nova.network.neutron [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1229.387767] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-9b1a73db-37dc-4baf-8749-b8d1f18d96c7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.417471] env[69992]: DEBUG oslo_concurrency.lockutils [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.945s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1229.425638] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Reconfiguring VM to detach interface {{(pid=69992) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1229.426398] env[69992]: DEBUG oslo_concurrency.lockutils [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.055s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1229.426674] env[69992]: DEBUG nova.objects.instance [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lazy-loading 'resources' on Instance uuid 1d436762-964d-40d9-871e-ee33c3ba25b5 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1229.428572] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bc0045f9-c0a1-4b17-ab6d-232dca2253b4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "c4bd5585-d917-4d92-9ce8-fa1950944f25" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.118s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1229.429284] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08c13d69-35e3-4813-822a-97dc579fb9be {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.449692] env[69992]: INFO nova.scheduler.client.report [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Deleted allocations for instance dd31269e-716c-44cd-9fc3-ce227fe5b3b2 [ 1229.459415] env[69992]: DEBUG oslo_vmware.api [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1229.459415] env[69992]: value = "task-2897625" [ 1229.459415] env[69992]: _type = "Task" [ 1229.459415] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.460317] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897624, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.471632] env[69992]: DEBUG oslo_vmware.api [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897625, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.535688] env[69992]: DEBUG nova.compute.manager [req-397a91b0-413a-4d22-91bc-26d3ee40cc7c req-2c8959e2-6b2d-4e37-b143-35d533c9fff7 service nova] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Received event network-vif-deleted-37beebe5-49d5-45f4-9dff-8ea169c1920f {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1229.745970] env[69992]: DEBUG nova.compute.manager [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1229.785812] env[69992]: INFO nova.compute.manager [-] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Took 1.41 seconds to deallocate network for instance. [ 1229.903579] env[69992]: DEBUG nova.network.neutron [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1229.956130] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897624, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.860627} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.956392] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 953c0e0d-3279-444c-b631-6ebbf24e5487/953c0e0d-3279-444c-b631-6ebbf24e5487.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1229.956605] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1229.956847] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2a76a472-54bf-44a8-9ce8-93cf4c6e8818 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.967348] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1229.967348] env[69992]: value = "task-2897626" [ 1229.967348] env[69992]: _type = "Task" [ 1229.967348] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.967799] env[69992]: DEBUG oslo_concurrency.lockutils [None req-af4be9b5-e56e-4268-b769-95c9cf1457f6 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "dd31269e-716c-44cd-9fc3-ce227fe5b3b2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.349s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1229.975790] env[69992]: DEBUG oslo_vmware.api [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897625, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.983329] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897626, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.005105] env[69992]: DEBUG nova.network.neutron [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1230.011956] env[69992]: DEBUG oslo_concurrency.lockutils [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "c4bd5585-d917-4d92-9ce8-fa1950944f25" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1230.012213] env[69992]: DEBUG oslo_concurrency.lockutils [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "c4bd5585-d917-4d92-9ce8-fa1950944f25" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1230.012544] env[69992]: INFO nova.compute.manager [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Shelving [ 1230.124119] env[69992]: DEBUG nova.network.neutron [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Successfully updated port: 24614f86-0f65-4b7b-b425-05b92f02312b {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1230.268798] env[69992]: DEBUG oslo_concurrency.lockutils [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1230.292741] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1230.404344] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9ad400-e267-47d1-94ca-0c080221b244 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.412322] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd437fc2-732c-4ee0-92d4-44537e950ee8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.441518] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a9e58a-e12e-4bcf-9591-f98b00f730b4 
{{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.448776] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739dfbad-1f7b-400c-8b53-3d5c79413bb1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.461758] env[69992]: DEBUG nova.compute.provider_tree [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1230.470688] env[69992]: DEBUG oslo_vmware.api [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897625, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.478236] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897626, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.2244} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.479166] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1230.479916] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-902b4bd7-b46d-46b7-b83a-2e6ba1dd2f2e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.503806] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 953c0e0d-3279-444c-b631-6ebbf24e5487/953c0e0d-3279-444c-b631-6ebbf24e5487.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1230.504408] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8b9faf5-4a83-43eb-bc46-23f7d91e4d8c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.518547] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Releasing lock "refresh_cache-a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1230.518946] env[69992]: DEBUG nova.compute.manager [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] 
Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1230.519173] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1230.521998] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-780c918a-885e-42c6-aaea-22ea03d1fbb1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.530628] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1230.530628] env[69992]: value = "task-2897627" [ 1230.530628] env[69992]: _type = "Task" [ 1230.530628] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.533058] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1230.536084] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8862bf7-d708-4ffd-ae38-1590b7de9ec4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.545165] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897627, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.549662] env[69992]: DEBUG oslo_vmware.api [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for the task: (returnval){ [ 1230.549662] env[69992]: value = "task-2897628" [ 1230.549662] env[69992]: _type = "Task" [ 1230.549662] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.560600] env[69992]: DEBUG oslo_vmware.api [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897628, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.625336] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Acquiring lock "refresh_cache-9591b360-414b-4aa9-94b2-5b9ccb9e7d39" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1230.625538] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Acquired lock "refresh_cache-9591b360-414b-4aa9-94b2-5b9ccb9e7d39" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1230.625733] env[69992]: DEBUG nova.network.neutron [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1230.964814] env[69992]: DEBUG nova.scheduler.client.report [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1230.983210] env[69992]: DEBUG oslo_vmware.api [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897625, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.033474] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1231.034102] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2fd2de84-b6f3-4afa-bfc0-6fa613cee389 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.044876] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897627, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.046406] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1231.046406] env[69992]: value = "task-2897629" [ 1231.046406] env[69992]: _type = "Task" [ 1231.046406] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.058776] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897629, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.061956] env[69992]: DEBUG oslo_vmware.api [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897628, 'name': PowerOffVM_Task, 'duration_secs': 0.135259} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.062230] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1231.062404] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1231.062669] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3641dd64-d142-41d2-967b-7ebe0dab0628 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.085360] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1231.085610] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1231.085806] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Deleting the datastore file [datastore1] a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1231.086100] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-73cd51a1-5816-460d-a694-6e56fc6facfe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.092214] env[69992]: DEBUG oslo_vmware.api [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for the task: (returnval){ [ 1231.092214] env[69992]: value = "task-2897631" [ 1231.092214] env[69992]: _type = "Task" [ 1231.092214] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.101147] env[69992]: DEBUG oslo_vmware.api [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897631, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.162795] env[69992]: DEBUG nova.network.neutron [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1231.308910] env[69992]: DEBUG nova.network.neutron [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Updating instance_info_cache with network_info: [{"id": "24614f86-0f65-4b7b-b425-05b92f02312b", "address": "fa:16:3e:f1:62:bd", "network": {"id": "643c6aea-6c8f-47f6-91d4-e437a7f1f34f", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1696978642-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8561522a702a48b9ae92d6c4c5de095c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24614f86-0f", "ovs_interfaceid": "24614f86-0f65-4b7b-b425-05b92f02312b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1231.481166] env[69992]: DEBUG oslo_concurrency.lockutils [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.054s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.483641] env[69992]: DEBUG oslo_vmware.api [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 
tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897625, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.484186] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.476s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.484435] env[69992]: DEBUG nova.objects.instance [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Lazy-loading 'resources' on Instance uuid af07ebd0-5f12-49c3-a518-95be9a8d6c82 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1231.516684] env[69992]: INFO nova.scheduler.client.report [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Deleted allocations for instance 1d436762-964d-40d9-871e-ee33c3ba25b5 [ 1231.542886] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897627, 'name': ReconfigVM_Task, 'duration_secs': 0.738091} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.543813] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 953c0e0d-3279-444c-b631-6ebbf24e5487/953c0e0d-3279-444c-b631-6ebbf24e5487.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1231.544992] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d488132-505d-4b98-a931-a8d829f14b70 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.551794] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1231.551794] env[69992]: value = "task-2897632" [ 1231.551794] env[69992]: _type = "Task" [ 1231.551794] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.559146] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897629, 'name': PowerOffVM_Task, 'duration_secs': 0.261388} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.559808] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1231.560644] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8c79ee-cbf9-4e44-883d-30d5367ea812 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.567130] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897632, 'name': Rename_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.585778] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab39203-594f-4044-8e2f-cfe2de23ee03 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.602958] env[69992]: DEBUG oslo_vmware.api [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Task: {'id': task-2897631, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.094246} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.603321] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1231.603555] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1231.603972] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1231.603972] env[69992]: INFO nova.compute.manager [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1231.604310] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1231.604559] env[69992]: DEBUG nova.compute.manager [-] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1231.604701] env[69992]: DEBUG nova.network.neutron [-] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1231.621079] env[69992]: DEBUG nova.network.neutron [-] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1231.812615] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Releasing lock "refresh_cache-9591b360-414b-4aa9-94b2-5b9ccb9e7d39" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1231.812615] env[69992]: DEBUG nova.compute.manager [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Instance network_info: |[{"id": "24614f86-0f65-4b7b-b425-05b92f02312b", "address": "fa:16:3e:f1:62:bd", "network": {"id": "643c6aea-6c8f-47f6-91d4-e437a7f1f34f", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1696978642-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8561522a702a48b9ae92d6c4c5de095c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24614f86-0f", "ovs_interfaceid": "24614f86-0f65-4b7b-b425-05b92f02312b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1231.812993] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:62:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0734cc4-5718-45e2-9f98-0ded96880bef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '24614f86-0f65-4b7b-b425-05b92f02312b', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1231.821085] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 
tempest-ServersTestManualDisk-54595896-project-member] Creating folder: Project (8561522a702a48b9ae92d6c4c5de095c). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1231.822520] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-577f4269-dc95-47a1-b9e0-cc90394b165e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.825902] env[69992]: DEBUG nova.compute.manager [req-e46b0fd7-d408-41ba-b4eb-2c0fac88590f req-4a60eb6b-002d-48c1-8844-b3e7c6752aeb service nova] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Received event network-vif-plugged-24614f86-0f65-4b7b-b425-05b92f02312b {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1231.826190] env[69992]: DEBUG oslo_concurrency.lockutils [req-e46b0fd7-d408-41ba-b4eb-2c0fac88590f req-4a60eb6b-002d-48c1-8844-b3e7c6752aeb service nova] Acquiring lock "9591b360-414b-4aa9-94b2-5b9ccb9e7d39-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1231.826464] env[69992]: DEBUG oslo_concurrency.lockutils [req-e46b0fd7-d408-41ba-b4eb-2c0fac88590f req-4a60eb6b-002d-48c1-8844-b3e7c6752aeb service nova] Lock "9591b360-414b-4aa9-94b2-5b9ccb9e7d39-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.826661] env[69992]: DEBUG oslo_concurrency.lockutils [req-e46b0fd7-d408-41ba-b4eb-2c0fac88590f req-4a60eb6b-002d-48c1-8844-b3e7c6752aeb service nova] Lock "9591b360-414b-4aa9-94b2-5b9ccb9e7d39-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.826835] env[69992]: DEBUG nova.compute.manager [req-e46b0fd7-d408-41ba-b4eb-2c0fac88590f req-4a60eb6b-002d-48c1-8844-b3e7c6752aeb service nova] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] No waiting events found dispatching network-vif-plugged-24614f86-0f65-4b7b-b425-05b92f02312b {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1231.827010] env[69992]: WARNING nova.compute.manager [req-e46b0fd7-d408-41ba-b4eb-2c0fac88590f req-4a60eb6b-002d-48c1-8844-b3e7c6752aeb service nova] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Received unexpected event network-vif-plugged-24614f86-0f65-4b7b-b425-05b92f02312b for instance with vm_state building and task_state spawning. [ 1231.827185] env[69992]: DEBUG nova.compute.manager [req-e46b0fd7-d408-41ba-b4eb-2c0fac88590f req-4a60eb6b-002d-48c1-8844-b3e7c6752aeb service nova] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Received event network-changed-24614f86-0f65-4b7b-b425-05b92f02312b {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1231.827335] env[69992]: DEBUG nova.compute.manager [req-e46b0fd7-d408-41ba-b4eb-2c0fac88590f req-4a60eb6b-002d-48c1-8844-b3e7c6752aeb service nova] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Refreshing instance network info cache due to event network-changed-24614f86-0f65-4b7b-b425-05b92f02312b. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1231.827517] env[69992]: DEBUG oslo_concurrency.lockutils [req-e46b0fd7-d408-41ba-b4eb-2c0fac88590f req-4a60eb6b-002d-48c1-8844-b3e7c6752aeb service nova] Acquiring lock "refresh_cache-9591b360-414b-4aa9-94b2-5b9ccb9e7d39" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.827654] env[69992]: DEBUG oslo_concurrency.lockutils [req-e46b0fd7-d408-41ba-b4eb-2c0fac88590f req-4a60eb6b-002d-48c1-8844-b3e7c6752aeb service nova] Acquired lock "refresh_cache-9591b360-414b-4aa9-94b2-5b9ccb9e7d39" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1231.827803] env[69992]: DEBUG nova.network.neutron [req-e46b0fd7-d408-41ba-b4eb-2c0fac88590f req-4a60eb6b-002d-48c1-8844-b3e7c6752aeb service nova] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Refreshing network info cache for port 24614f86-0f65-4b7b-b425-05b92f02312b {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1231.839333] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Created folder: Project (8561522a702a48b9ae92d6c4c5de095c) in parent group-v581821. [ 1231.839515] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Creating folder: Instances. Parent ref: group-v582046. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1231.839742] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0501d653-c3f0-43e0-b687-2dc8bb2dd08b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.848467] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Created folder: Instances in parent group-v582046. [ 1231.848691] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1231.848867] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1231.849068] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ff05350c-9c41-4c1b-94e6-f3276f725901 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.867188] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1231.867188] env[69992]: value = "task-2897635" [ 1231.867188] env[69992]: _type = "Task" [ 1231.867188] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.876040] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897635, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.979144] env[69992]: DEBUG oslo_vmware.api [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897625, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.025503] env[69992]: DEBUG oslo_concurrency.lockutils [None req-940f74fa-66b6-4dc4-8738-8fd795ec01db tempest-MigrationsAdminTest-2064433656 tempest-MigrationsAdminTest-2064433656-project-member] Lock "1d436762-964d-40d9-871e-ee33c3ba25b5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.290s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1232.062706] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897632, 'name': Rename_Task, 'duration_secs': 0.14245} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.062706] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1232.063187] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1c6214e3-757b-4417-837a-f015cfaf0160 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.072296] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1232.072296] env[69992]: value = "task-2897636" [ 1232.072296] env[69992]: _type = "Task" [ 1232.072296] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.088852] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897636, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.100030] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Creating Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1232.101038] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c3533914-4e85-44de-8627-3c3c37abbdf1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.108035] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1232.108035] env[69992]: value = "task-2897637" [ 1232.108035] env[69992]: _type = "Task" [ 1232.108035] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.115904] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897637, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.124012] env[69992]: DEBUG nova.network.neutron [-] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1232.306909] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1232.307203] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1232.377453] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897635, 'name': CreateVM_Task, 'duration_secs': 0.340925} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.380933] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1232.382065] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1232.382391] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1232.382761] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1232.385051] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a53b4856-aa63-4974-afd9-bbc9093e9c48 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.391062] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Waiting for the task: (returnval){ [ 1232.391062] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52373f28-55be-114f-63a8-1d4055055eee" [ 1232.391062] env[69992]: _type = "Task" [ 1232.391062] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.392999] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63aa45fd-a974-46de-a41c-a2aa769bdef1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.405991] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae90449c-e0a8-4206-b744-2657e6528fff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.409270] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52373f28-55be-114f-63a8-1d4055055eee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.437596] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5643e0-b0fa-49bc-a8f8-983cf6d28b81 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.449226] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb2dd5c-e0f1-4d97-84e3-3f5c9ce436f7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.464181] env[69992]: DEBUG nova.compute.provider_tree [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1232.478452] env[69992]: DEBUG oslo_vmware.api [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897625, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.586281] env[69992]: DEBUG oslo_vmware.api [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897636, 'name': PowerOnVM_Task, 'duration_secs': 0.506839} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.587677] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1232.587832] env[69992]: INFO nova.compute.manager [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Took 8.74 seconds to spawn the instance on the hypervisor. [ 1232.588048] env[69992]: DEBUG nova.compute.manager [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1232.588892] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4353614-7807-4afd-9acc-6025ac451f2e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.598931] env[69992]: DEBUG nova.network.neutron [req-e46b0fd7-d408-41ba-b4eb-2c0fac88590f req-4a60eb6b-002d-48c1-8844-b3e7c6752aeb service nova] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Updated VIF entry in instance network info cache for port 24614f86-0f65-4b7b-b425-05b92f02312b. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1232.599279] env[69992]: DEBUG nova.network.neutron [req-e46b0fd7-d408-41ba-b4eb-2c0fac88590f req-4a60eb6b-002d-48c1-8844-b3e7c6752aeb service nova] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Updating instance_info_cache with network_info: [{"id": "24614f86-0f65-4b7b-b425-05b92f02312b", "address": "fa:16:3e:f1:62:bd", "network": {"id": "643c6aea-6c8f-47f6-91d4-e437a7f1f34f", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1696978642-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8561522a702a48b9ae92d6c4c5de095c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24614f86-0f", "ovs_interfaceid": "24614f86-0f65-4b7b-b425-05b92f02312b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1232.617847] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897637, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.626699] env[69992]: INFO nova.compute.manager [-] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Took 1.02 seconds to deallocate network for instance. [ 1232.811364] env[69992]: DEBUG nova.compute.manager [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1232.905090] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52373f28-55be-114f-63a8-1d4055055eee, 'name': SearchDatastore_Task, 'duration_secs': 0.013777} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.905700] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1232.906129] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1232.906542] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1232.906852] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1232.907099] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1232.907403] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-09d448c4-1996-430b-b393-8d34b11f49da {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.915746] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1232.915932] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1232.916694] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74990e7b-732e-408d-bd88-45df7e858136 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.922361] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Waiting for the task: (returnval){ [ 1232.922361] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52efef9a-9557-b10b-ed43-fccdb646b301" [ 1232.922361] env[69992]: _type = "Task" [ 1232.922361] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.930951] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52efef9a-9557-b10b-ed43-fccdb646b301, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.971482] env[69992]: DEBUG nova.scheduler.client.report [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1232.985918] env[69992]: DEBUG oslo_vmware.api [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897625, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.108371] env[69992]: DEBUG oslo_concurrency.lockutils [req-e46b0fd7-d408-41ba-b4eb-2c0fac88590f req-4a60eb6b-002d-48c1-8844-b3e7c6752aeb service nova] Releasing lock "refresh_cache-9591b360-414b-4aa9-94b2-5b9ccb9e7d39" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1233.114219] env[69992]: INFO nova.compute.manager [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Took 38.59 seconds to build instance. [ 1233.121506] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897637, 'name': CreateSnapshot_Task, 'duration_secs': 0.672208} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.122816] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Created Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1233.122816] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e98cb9-f305-427f-80ec-cf25636e61d3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.133402] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.333523] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.434222] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52efef9a-9557-b10b-ed43-fccdb646b301, 'name': SearchDatastore_Task, 'duration_secs': 0.009699} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.435118] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbbcec54-a139-4c65-a1bd-88df54e9279d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.441323] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Waiting for the task: (returnval){ [ 1233.441323] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]521322e5-5ab1-5fea-b311-030c24713f68" [ 1233.441323] env[69992]: _type = "Task" [ 1233.441323] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.449196] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521322e5-5ab1-5fea-b311-030c24713f68, 'name': SearchDatastore_Task} progress is 0%. 
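Every "Waiting for the task … progress is N% … completed successfully" sequence in this trace is the oslo.vmware task-polling loop (wait_for_task / _poll_task in api.py, as the location tags show). A minimal sketch of that pattern, assuming an already-established VMwareAPISession named `session` and using the datastore-browser search seen above; the helper name and argument values are placeholders:

    from oslo_vmware import exceptions as vmw_exc

    def search_datastore(session, browser_ref, ds_path, search_spec):
        # Start the server-side task; SearchDatastore_Task is invoked on a
        # HostDatastoreBrowser managed object.
        task = session.invoke_api(session.vim, 'SearchDatastore_Task',
                                  browser_ref,
                                  datastorePath=ds_path,
                                  searchSpec=search_spec)
        try:
            # wait_for_task() polls the task (the "progress is N%" lines)
            # and returns the completed task info, or raises on failure.
            task_info = session.wait_for_task(task)
        except vmw_exc.VimException:
            raise
        return task_info.result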
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.476708] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.992s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.479292] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.519s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1233.479652] env[69992]: DEBUG nova.objects.instance [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lazy-loading 'resources' on Instance uuid 4e93b655-aaf4-49b8-bbb2-92287ec15bbc {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1233.490557] env[69992]: DEBUG oslo_vmware.api [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897625, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.497212] env[69992]: INFO nova.scheduler.client.report [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Deleted allocations for instance af07ebd0-5f12-49c3-a518-95be9a8d6c82 [ 1233.617238] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4364245e-3d4c-4578-8ecf-97b1634eb341 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "953c0e0d-3279-444c-b631-6ebbf24e5487" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.104s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.641391] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Creating linked-clone VM from snapshot {{(pid=69992) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1233.641733] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a8e1b50e-4cdc-4ae0-aebb-c841e5069de6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.650791] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1233.650791] env[69992]: value = "task-2897638" [ 1233.650791] env[69992]: _type = "Task" [ 1233.650791] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.658503] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897638, 'name': CloneVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.952320] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521322e5-5ab1-5fea-b311-030c24713f68, 'name': SearchDatastore_Task, 'duration_secs': 0.011609} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.952713] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1233.953034] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 9591b360-414b-4aa9-94b2-5b9ccb9e7d39/9591b360-414b-4aa9-94b2-5b9ccb9e7d39.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1233.953285] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee63f570-e28e-458c-a8b5-aa5fcc829b44 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.959968] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Waiting for the task: (returnval){ [ 1233.959968] env[69992]: value = "task-2897639" [ 1233.959968] env[69992]: _type = "Task" [ 1233.959968] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.968091] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Task: {'id': task-2897639, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.995792] env[69992]: DEBUG oslo_vmware.api [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897625, 'name': ReconfigVM_Task} progress is 14%. 
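The CopyVirtualDisk_Task above copies the cached image VMDK into the new instance's directory on the same datastore. A hedged sketch of how that call looks through oslo.vmware, assuming `session` and a datacenter reference `dc_ref`; the paths in the comments mirror the log but the helper itself is illustrative:

    def copy_cached_image(session, dc_ref, source_vmdk, dest_vmdk):
        vdm = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm,
                                  # e.g. '[datastore2] devstack-image-cache_base/<image>/<image>.vmdk'
                                  sourceName=source_vmdk,
                                  sourceDatacenter=dc_ref,
                                  # e.g. '[datastore2] <instance_uuid>/<instance_uuid>.vmdk'
                                  destName=dest_vmdk,
                                  destDatacenter=dc_ref)
        session.wait_for_task(task)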
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.004856] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c9074a17-f09f-49d5-a42b-94c0ade49ee2 tempest-ServersTestBootFromVolume-624231863 tempest-ServersTestBootFromVolume-624231863-project-member] Lock "af07ebd0-5f12-49c3-a518-95be9a8d6c82" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.437s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.166395] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897638, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.435204] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba89205-aac7-4ade-822e-a5b260116d43 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.445476] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d3f3a7-aea2-461e-9feb-17a9776fe7f4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.479696] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16909856-1b92-413b-8586-30c611536d04 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.490768] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Task: {'id': task-2897639, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507707} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.492373] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e766ac93-881c-486d-b781-e17c8b4a2b52 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.496596] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 9591b360-414b-4aa9-94b2-5b9ccb9e7d39/9591b360-414b-4aa9-94b2-5b9ccb9e7d39.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1234.496907] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1234.500089] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9619ab0d-f38d-4483-bf26-4acd1ed515b9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.502117] env[69992]: DEBUG oslo_vmware.api [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897625, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.514078] env[69992]: DEBUG nova.compute.provider_tree [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1234.518018] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Waiting for the task: (returnval){ [ 1234.518018] env[69992]: value = "task-2897640" [ 1234.518018] env[69992]: _type = "Task" [ 1234.518018] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.523689] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Task: {'id': task-2897640, 'name': ExtendVirtualDisk_Task} progress is 0%. 
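"Extending root virtual disk to 1048576" is the flavor's root disk size expressed in KB: 1 GB * 1024 * 1024 = 1048576 KB. A sketch of the conversion and the corresponding VirtualDiskManager call; the argument names follow the vSphere API (eagerZero=False keeps the extension thin), while the helper itself is illustrative:

    def extend_root_disk(session, dc_ref, vmdk_path, root_gb):
        new_capacity_kb = root_gb * 1024 * 1024   # 1 GB -> 1048576 KB, as in the log
        vdm = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', vdm,
                                  name=vmdk_path,
                                  datacenter=dc_ref,
                                  newCapacityKb=new_capacity_kb,
                                  eagerZero=False)
        session.wait_for_task(task)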
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.649154] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "953c0e0d-3279-444c-b631-6ebbf24e5487" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.649479] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "953c0e0d-3279-444c-b631-6ebbf24e5487" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.649655] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "953c0e0d-3279-444c-b631-6ebbf24e5487-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.649820] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "953c0e0d-3279-444c-b631-6ebbf24e5487-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.650025] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "953c0e0d-3279-444c-b631-6ebbf24e5487-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.655588] env[69992]: INFO nova.compute.manager [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Terminating instance [ 1234.664579] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897638, 'name': CloneVM_Task} progress is 95%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.993731] env[69992]: DEBUG oslo_vmware.api [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897625, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.018161] env[69992]: DEBUG nova.scheduler.client.report [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1235.036057] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Task: {'id': task-2897640, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070634} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.036410] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1235.037338] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ecc3db-35f7-407b-b5c2-84c59474105a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.065298] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] 9591b360-414b-4aa9-94b2-5b9ccb9e7d39/9591b360-414b-4aa9-94b2-5b9ccb9e7d39.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1235.066302] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-522c6908-7d72-4ee2-bbd1-6492cb1ec1c9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.096513] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Waiting for the task: (returnval){ [ 1235.096513] env[69992]: value = "task-2897641" [ 1235.096513] env[69992]: _type = "Task" [ 1235.096513] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.105423] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Task: {'id': task-2897641, 'name': ReconfigVM_Task} progress is 6%. 
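The ReconfigVM_Task logged above attaches the freshly copied VMDK to the VM as its root disk. A heavily simplified sketch of the device-change spec such a reconfigure uses; the spec classes are vSphere API types created through the session's SOAP client factory, controller-key and unit-number handling is elided, and a flat backing is shown for brevity (the log's "sparse" disk type would use a sparse backing class):

    def attach_existing_vmdk(session, vm_ref, vmdk_path, controller_key, unit_number):
        cf = session.vim.client.factory

        backing = cf.create('ns0:VirtualDiskFlatVer2BackingInfo')
        backing.fileName = vmdk_path          # '[datastore2] <uuid>/<uuid>.vmdk'
        backing.diskMode = 'persistent'

        disk = cf.create('ns0:VirtualDisk')
        disk.backing = backing
        disk.controllerKey = controller_key
        disk.unitNumber = unit_number
        disk.key = -100                       # negative key: assigned by vCenter

        dev_change = cf.create('ns0:VirtualDeviceConfigSpec')
        dev_change.operation = 'add'
        dev_change.device = disk

        config_spec = cf.create('ns0:VirtualMachineConfigSpec')
        config_spec.deviceChange = [dev_change]

        task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                                  spec=config_spec)
        session.wait_for_task(task)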
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.162399] env[69992]: DEBUG nova.compute.manager [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1235.162687] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1235.163079] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897638, 'name': CloneVM_Task} progress is 95%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.163948] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e151a1e4-6bdc-481e-b69a-f4e18a0d1a97 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.171752] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1235.172173] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4cf93e19-ae90-49e3-bac1-37371b17df56 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.178883] env[69992]: DEBUG oslo_vmware.api [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1235.178883] env[69992]: value = "task-2897642" [ 1235.178883] env[69992]: _type = "Task" [ 1235.178883] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.188116] env[69992]: DEBUG oslo_vmware.api [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897642, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.493397] env[69992]: DEBUG oslo_vmware.api [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897625, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.531572] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.050s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1235.532181] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.649s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.532841] env[69992]: DEBUG nova.objects.instance [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lazy-loading 'resources' on Instance uuid a35dd590-b5ff-4878-8aa5-8797814d8779 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1235.560318] env[69992]: INFO nova.scheduler.client.report [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Deleted allocations for instance 4e93b655-aaf4-49b8-bbb2-92287ec15bbc [ 1235.606255] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Task: {'id': task-2897641, 'name': ReconfigVM_Task, 'duration_secs': 0.281969} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.606927] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Reconfigured VM instance instance-00000050 to attach disk [datastore2] 9591b360-414b-4aa9-94b2-5b9ccb9e7d39/9591b360-414b-4aa9-94b2-5b9ccb9e7d39.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1235.608210] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ffe78a73-99a9-4e65-b476-c771d5652111 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.614235] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Waiting for the task: (returnval){ [ 1235.614235] env[69992]: value = "task-2897643" [ 1235.614235] env[69992]: _type = "Task" [ 1235.614235] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.623090] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Task: {'id': task-2897643, 'name': Rename_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.662595] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897638, 'name': CloneVM_Task, 'duration_secs': 1.863365} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.662905] env[69992]: INFO nova.virt.vmwareapi.vmops [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Created linked-clone VM from snapshot [ 1235.663691] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc151a3-8a66-4a77-8ee6-86744b8543bd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.671452] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Uploading image 93d88d09-8938-48a9-89d3-80a51514dcfd {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1235.688326] env[69992]: DEBUG oslo_vmware.api [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897642, 'name': PowerOffVM_Task, 'duration_secs': 0.188428} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.690399] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1235.690590] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1235.691366] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b9ec7fc7-87f1-462f-b634-70ddb71f5935 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.696573] env[69992]: DEBUG oslo_vmware.rw_handles [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1235.696573] env[69992]: value = "vm-582050" [ 1235.696573] env[69992]: _type = "VirtualMachine" [ 1235.696573] env[69992]: }. 
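The CloneVM_Task above turns the snapshot taken earlier into a linked-clone VM whose disks are child deltas of the snapshot; that clone ("vm-582050") is what gets exported as the image. A hedged sketch of the clone spec such an operation uses; the folder, name, snapshot and datastore references are placeholders, and 'createNewChildDiskBacking' is the vSphere relocate option that makes the clone linked rather than full:

    def create_linked_clone(session, vm_ref, snapshot_ref, folder_ref, clone_name, ds_ref):
        cf = session.vim.client.factory

        rel_spec = cf.create('ns0:VirtualMachineRelocateSpec')
        rel_spec.datastore = ds_ref
        rel_spec.diskMoveType = 'createNewChildDiskBacking'   # linked clone

        clone_spec = cf.create('ns0:VirtualMachineCloneSpec')
        clone_spec.location = rel_spec
        clone_spec.snapshot = snapshot_ref
        clone_spec.powerOn = False
        clone_spec.template = False

        task = session.invoke_api(session.vim, 'CloneVM_Task', vm_ref,
                                  folder=folder_ref,
                                  name=clone_name,
                                  spec=clone_spec)
        task_info = session.wait_for_task(task)
        return task_info.result    # moref of the new VM, e.g. 'vm-582050' above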
{{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1235.696869] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e939a354-44d0-4e38-9a82-d3bd758ed9b8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.704553] env[69992]: DEBUG oslo_vmware.rw_handles [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lease: (returnval){ [ 1235.704553] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5205a5bc-e526-75a5-00a2-d82c9343a71d" [ 1235.704553] env[69992]: _type = "HttpNfcLease" [ 1235.704553] env[69992]: } obtained for exporting VM: (result){ [ 1235.704553] env[69992]: value = "vm-582050" [ 1235.704553] env[69992]: _type = "VirtualMachine" [ 1235.704553] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1235.704836] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the lease: (returnval){ [ 1235.704836] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5205a5bc-e526-75a5-00a2-d82c9343a71d" [ 1235.704836] env[69992]: _type = "HttpNfcLease" [ 1235.704836] env[69992]: } to be ready. {{(pid=69992) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1235.713804] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1235.713804] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5205a5bc-e526-75a5-00a2-d82c9343a71d" [ 1235.713804] env[69992]: _type = "HttpNfcLease" [ 1235.713804] env[69992]: } is initializing. 
{{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1235.753607] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1235.753840] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1235.754036] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Deleting the datastore file [datastore2] 953c0e0d-3279-444c-b631-6ebbf24e5487 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1235.754327] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a069b856-4a32-4c55-b14c-9325485bfaeb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.762221] env[69992]: DEBUG oslo_vmware.api [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1235.762221] env[69992]: value = "task-2897646" [ 1235.762221] env[69992]: _type = "Task" [ 1235.762221] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.773167] env[69992]: DEBUG oslo_vmware.api [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897646, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.993866] env[69992]: DEBUG oslo_vmware.api [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897625, 'name': ReconfigVM_Task, 'duration_secs': 6.110383} completed successfully. 
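The destroy path for instance 953c0e0d-3279-444c-b631-6ebbf24e5487 follows the three steps visible above: power the VM off, unregister it from vCenter, then delete its datastore directory. A compact sketch of that sequence, assuming `session`, the VM moref and a datacenter reference; the datastore path is illustrative, and a real implementation also tolerates a VM that is already powered off:

    def destroy_instance(session, vm_ref, dc_ref, ds_dir_path):
        # 1. Power off the VM (PowerOffVM_Task).
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

        # 2. Unregister the VM; this is a plain call, not a task.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # 3. Remove the instance directory, e.g. '[datastore2] 953c0e0d-...'.
        fm = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task', fm,
                                  name=ds_dir_path,
                                  datacenter=dc_ref)
        session.wait_for_task(task)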
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.994178] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1235.995295] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Reconfigured VM to detach interface {{(pid=69992) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1236.070209] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d14f41bd-6153-48f4-bef9-bfda48abe262 tempest-ServerShowV247Test-1722258493 tempest-ServerShowV247Test-1722258493-project-member] Lock "4e93b655-aaf4-49b8-bbb2-92287ec15bbc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.383s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1236.127582] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Task: {'id': task-2897643, 'name': Rename_Task, 'duration_secs': 0.147402} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.128729] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1236.129127] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-be30ff23-97e0-4851-91a3-c8e45d5d8ea2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.138017] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Waiting for the task: (returnval){ [ 1236.138017] env[69992]: value = "task-2897647" [ 1236.138017] env[69992]: _type = "Task" [ 1236.138017] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.150567] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Task: {'id': task-2897647, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.216972] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1236.216972] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5205a5bc-e526-75a5-00a2-d82c9343a71d" [ 1236.216972] env[69992]: _type = "HttpNfcLease" [ 1236.216972] env[69992]: } is ready. 
{{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1236.218244] env[69992]: DEBUG oslo_vmware.rw_handles [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1236.218244] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5205a5bc-e526-75a5-00a2-d82c9343a71d" [ 1236.218244] env[69992]: _type = "HttpNfcLease" [ 1236.218244] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1236.218689] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882a22d1-74a4-4d92-a552-ab1c90a27893 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.228046] env[69992]: DEBUG oslo_vmware.rw_handles [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525194af-152a-46b4-6f00-a439dcbb463a/disk-0.vmdk from lease info. {{(pid=69992) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1236.229407] env[69992]: DEBUG oslo_vmware.rw_handles [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525194af-152a-46b4-6f00-a439dcbb463a/disk-0.vmdk for reading. {{(pid=69992) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1236.307531] env[69992]: DEBUG oslo_vmware.api [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897646, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140839} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.307865] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1236.308506] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1236.308506] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1236.308506] env[69992]: INFO nova.compute.manager [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Took 1.15 seconds to destroy the instance on the hypervisor. 
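The image upload above works by asking vCenter for an HttpNfcLease on the linked clone, waiting for it to leave the "initializing" state, and then streaming the VMDK from the device URL the lease exposes (the esx7c1n1… disk-0.vmdk URL in the log). A sketch of that lease handshake; the property reads go through oslo.vmware's vim_util, the helper name is illustrative, and the HTTP read plus Glance upload is elided:

    import time
    from oslo_vmware import vim_util

    def open_export_lease(session, vm_ref):
        # Ask for an export lease on the (linked-clone) VM.
        lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)

        # Poll until the lease is ready, as the log does.
        while True:
            state = session.invoke_api(vim_util, 'get_object_property',
                                       session.vim, lease, 'state')
            if state == 'ready':
                break
            if state == 'error':
                raise RuntimeError('HttpNfcLease entered error state')
            time.sleep(1)

        info = session.invoke_api(vim_util, 'get_object_property',
                                  session.vim, lease, 'info')
        # The first device URL is the exported disk, e.g. .../nfc/<id>/disk-0.vmdk
        return lease, info.deviceUrl[0].url

    # While reading, the client periodically reports progress and finally closes:
    #   session.invoke_api(session.vim, 'HttpNfcLeaseProgress', lease, percent=50)
    #   session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)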
[ 1236.310158] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1236.310158] env[69992]: DEBUG nova.compute.manager [-] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1236.310158] env[69992]: DEBUG nova.network.neutron [-] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1236.336841] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6ed6dc6d-999e-47fb-8b76-0267fc1a6d86 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.474970] env[69992]: DEBUG nova.compute.manager [req-8a65fc64-1e09-48b6-b9c8-b58d4406dee0 req-6f06b153-67e1-48ef-a597-5bb063390444 service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Received event network-vif-deleted-4ead8f7e-9ac9-474b-9302-a618d1bf1988 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1236.475177] env[69992]: INFO nova.compute.manager [req-8a65fc64-1e09-48b6-b9c8-b58d4406dee0 req-6f06b153-67e1-48ef-a597-5bb063390444 service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Neutron deleted interface 4ead8f7e-9ac9-474b-9302-a618d1bf1988; detaching it from the instance and deleting it from the info cache [ 1236.475462] env[69992]: DEBUG nova.network.neutron [req-8a65fc64-1e09-48b6-b9c8-b58d4406dee0 req-6f06b153-67e1-48ef-a597-5bb063390444 service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Updating instance_info_cache with network_info: [{"id": "48ef557e-b0bc-4415-84c9-60b9146b4ff7", "address": "fa:16:3e:6e:ee:46", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48ef557e-b0", "ovs_interfaceid": "48ef557e-b0bc-4415-84c9-60b9146b4ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "82ef9ca1-4fe1-48ff-bf80-ca693a1f7662", "address": "fa:16:3e:da:5b:2f", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": 
"tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82ef9ca1-4f", "ovs_interfaceid": "82ef9ca1-4fe1-48ff-bf80-ca693a1f7662", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.590799] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe594d0-0847-4326-9c17-4638264e3fc8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.599395] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354efcb7-1100-43e8-a556-f307a3d294c1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.634933] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-032f3ef7-f95d-4192-bd97-3625ae8a3d04 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.645771] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77743ded-8ced-476c-b068-b5175287ecf8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.652926] env[69992]: DEBUG oslo_vmware.api [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Task: {'id': task-2897647, 'name': PowerOnVM_Task, 'duration_secs': 0.450017} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.653605] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1236.653834] env[69992]: INFO nova.compute.manager [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Took 7.80 seconds to spawn the instance on the hypervisor. 
[ 1236.654063] env[69992]: DEBUG nova.compute.manager [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1236.655046] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a266b91-9ab5-476a-95d9-c49f81e08efe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.666984] env[69992]: DEBUG nova.compute.provider_tree [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1236.820250] env[69992]: DEBUG nova.compute.manager [req-2fa9d149-6c4b-438e-9bf8-992d13f9890a req-45deace3-8e81-4234-a794-b02d6f51d30b service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Received event network-vif-deleted-82ef9ca1-4fe1-48ff-bf80-ca693a1f7662 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1236.820250] env[69992]: INFO nova.compute.manager [req-2fa9d149-6c4b-438e-9bf8-992d13f9890a req-45deace3-8e81-4234-a794-b02d6f51d30b service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Neutron deleted interface 82ef9ca1-4fe1-48ff-bf80-ca693a1f7662; detaching it from the instance and deleting it from the info cache [ 1236.820250] env[69992]: DEBUG nova.network.neutron [req-2fa9d149-6c4b-438e-9bf8-992d13f9890a req-45deace3-8e81-4234-a794-b02d6f51d30b service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Updating instance_info_cache with network_info: [{"id": "48ef557e-b0bc-4415-84c9-60b9146b4ff7", "address": "fa:16:3e:6e:ee:46", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48ef557e-b0", "ovs_interfaceid": "48ef557e-b0bc-4415-84c9-60b9146b4ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.978577] env[69992]: DEBUG oslo_concurrency.lockutils [req-8a65fc64-1e09-48b6-b9c8-b58d4406dee0 req-6f06b153-67e1-48ef-a597-5bb063390444 service nova] Acquiring lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.978627] env[69992]: DEBUG oslo_concurrency.lockutils [req-8a65fc64-1e09-48b6-b9c8-b58d4406dee0 req-6f06b153-67e1-48ef-a597-5bb063390444 service nova] Acquired lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1236.979872] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef237f7-aa4b-4174-8daf-82d1bedaf96f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.999503] env[69992]: DEBUG oslo_concurrency.lockutils [req-8a65fc64-1e09-48b6-b9c8-b58d4406dee0 req-6f06b153-67e1-48ef-a597-5bb063390444 service nova] Releasing lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1237.000301] env[69992]: WARNING nova.compute.manager [req-8a65fc64-1e09-48b6-b9c8-b58d4406dee0 req-6f06b153-67e1-48ef-a597-5bb063390444 service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Detach interface failed, port_id=4ead8f7e-9ac9-474b-9302-a618d1bf1988, reason: No device with interface-id 4ead8f7e-9ac9-474b-9302-a618d1bf1988 exists on VM: nova.exception.NotFound: No device with interface-id 4ead8f7e-9ac9-474b-9302-a618d1bf1988 exists on VM [ 1237.170360] env[69992]: DEBUG nova.network.neutron [-] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1237.176029] env[69992]: DEBUG nova.scheduler.client.report [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1237.190634] env[69992]: INFO nova.compute.manager [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Took 38.41 seconds to build instance. 
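The WARNING above is a benign race: Neutron reports port 4ead8f7e-9ac9-474b-9302-a618d1bf1988 as deleted, but the matching NIC was already removed from the VM by the earlier ReconfigVM_Task, so the device lookup raises NotFound and the manager only logs it. A self-contained sketch of that "find the device or treat it as already gone" pattern; the device list and matching on MAC address are illustrative stand-ins for the real hardware query, and the NotFound class stands in for nova.exception.NotFound:

    class NotFound(Exception):
        """Stand-in for nova.exception.NotFound."""

    def find_nic_by_mac(devices, mac):
        # 'devices' stands in for the VM's config.hardware.device list.
        for dev in devices:
            if dev.get("macAddress") == mac:
                return dev
        raise NotFound(f"No device with MAC {mac} exists on VM")

    def detach_interface(devices, mac):
        try:
            nic = find_nic_by_mac(devices, mac)
        except NotFound as exc:
            # Mirrors the log: warn and carry on instead of failing the event.
            print(f"WARNING: Detach interface failed, reason: {exc}")
            return
        # ... otherwise build a VirtualDeviceConfigSpec with operation='remove'
        # and reconfigure the VM, as the ReconfigVM_Task entries show ...

    # The interface was already gone, so only the warning path runs:
    detach_interface([{"macAddress": "fa:16:3e:6e:ee:46"}], "fa:16:3e:da:5b:2f")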
[ 1237.323047] env[69992]: DEBUG oslo_concurrency.lockutils [req-2fa9d149-6c4b-438e-9bf8-992d13f9890a req-45deace3-8e81-4234-a794-b02d6f51d30b service nova] Acquiring lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.323292] env[69992]: DEBUG oslo_concurrency.lockutils [req-2fa9d149-6c4b-438e-9bf8-992d13f9890a req-45deace3-8e81-4234-a794-b02d6f51d30b service nova] Acquired lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1237.324228] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7fd390-cd32-4deb-8595-ae8fb48c79b6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.348257] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3932cc12-e02d-43fb-93e3-686998f22a64 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.376720] env[69992]: DEBUG nova.virt.vmwareapi.vmops [req-2fa9d149-6c4b-438e-9bf8-992d13f9890a req-45deace3-8e81-4234-a794-b02d6f51d30b service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Reconfiguring VM to detach interface {{(pid=69992) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1237.376720] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-566319ac-ac2c-443c-a6de-17a3149a08e5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.387521] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.393541] env[69992]: DEBUG oslo_vmware.api [req-2fa9d149-6c4b-438e-9bf8-992d13f9890a req-45deace3-8e81-4234-a794-b02d6f51d30b service nova] Waiting for the task: (returnval){ [ 1237.393541] env[69992]: value = "task-2897648" [ 1237.393541] env[69992]: _type = "Task" [ 1237.393541] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.403485] env[69992]: DEBUG oslo_vmware.api [req-2fa9d149-6c4b-438e-9bf8-992d13f9890a req-45deace3-8e81-4234-a794-b02d6f51d30b service nova] Task: {'id': task-2897648, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.455836] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.455958] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1237.456162] env[69992]: DEBUG nova.network.neutron [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1237.679058] env[69992]: INFO nova.compute.manager [-] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Took 1.37 seconds to deallocate network for instance. [ 1237.680500] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.147s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.684557] env[69992]: DEBUG oslo_concurrency.lockutils [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.662s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.685160] env[69992]: DEBUG nova.objects.instance [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lazy-loading 'resources' on Instance uuid 033d667f-5511-4254-a7e2-f8a2a94178d1 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1237.693116] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9de8ef4c-523e-4227-abe7-71b4cf320b0f tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Lock "9591b360-414b-4aa9-94b2-5b9ccb9e7d39" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.920s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.711208] env[69992]: INFO nova.scheduler.client.report [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Deleted allocations for instance a35dd590-b5ff-4878-8aa5-8797814d8779 [ 1237.906448] env[69992]: DEBUG oslo_vmware.api [req-2fa9d149-6c4b-438e-9bf8-992d13f9890a req-45deace3-8e81-4234-a794-b02d6f51d30b service nova] Task: {'id': task-2897648, 'name': 
ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.195176] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1238.224985] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eecfe255-9bce-41a6-b8b6-01dc2038848d tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "a35dd590-b5ff-4878-8aa5-8797814d8779" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.962s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.406616] env[69992]: DEBUG oslo_vmware.api [req-2fa9d149-6c4b-438e-9bf8-992d13f9890a req-45deace3-8e81-4234-a794-b02d6f51d30b service nova] Task: {'id': task-2897648, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.502209] env[69992]: DEBUG nova.compute.manager [req-19e7e4e3-0527-4c20-bbc3-eeea6b8b344f req-461e6d5d-2224-479d-9172-00f9d5264ecf service nova] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Received event network-vif-deleted-9e523523-ecdf-4308-88c7-6336fdea2bee {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1238.600929] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47aa35e4-db26-483d-80b2-a3fd52c39608 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.609048] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203bac8a-1681-46be-8b7e-b46a99138318 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.642149] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-313fd2d1-527b-43e1-841f-1df611fb8685 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.650321] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1595ac3b-22eb-41ac-b3ed-eff0e04cea5c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.655511] env[69992]: DEBUG nova.network.neutron [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Updating instance_info_cache with network_info: [{"id": "48ef557e-b0bc-4415-84c9-60b9146b4ff7", "address": "fa:16:3e:6e:ee:46", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", 
"type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap48ef557e-b0", "ovs_interfaceid": "48ef557e-b0bc-4415-84c9-60b9146b4ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.667020] env[69992]: DEBUG nova.compute.provider_tree [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1238.907982] env[69992]: DEBUG oslo_vmware.api [req-2fa9d149-6c4b-438e-9bf8-992d13f9890a req-45deace3-8e81-4234-a794-b02d6f51d30b service nova] Task: {'id': task-2897648, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.160999] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "refresh_cache-5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1239.173022] env[69992]: DEBUG nova.scheduler.client.report [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1239.423334] env[69992]: DEBUG oslo_vmware.api [req-2fa9d149-6c4b-438e-9bf8-992d13f9890a req-45deace3-8e81-4234-a794-b02d6f51d30b service nova] Task: {'id': task-2897648, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.663512] env[69992]: DEBUG oslo_concurrency.lockutils [None req-aed8a553-6640-4825-a2e1-2ba6bae5dca2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "interface-5f98a2aa-eb7b-41d2-9e9f-14cee9445942-4ead8f7e-9ac9-474b-9302-a618d1bf1988" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.815s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1239.676957] env[69992]: DEBUG oslo_concurrency.lockutils [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.993s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1239.680776] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.585s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1239.682049] env[69992]: INFO nova.compute.claims [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1239.699287] env[69992]: INFO nova.scheduler.client.report [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Deleted allocations for instance 033d667f-5511-4254-a7e2-f8a2a94178d1 [ 1239.909180] env[69992]: DEBUG oslo_vmware.api [req-2fa9d149-6c4b-438e-9bf8-992d13f9890a req-45deace3-8e81-4234-a794-b02d6f51d30b service nova] Task: {'id': task-2897648, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.206735] env[69992]: DEBUG oslo_concurrency.lockutils [None req-66afb6f0-75a2-45e2-8508-41c1e46878ab tempest-MultipleCreateTestJSON-664245217 tempest-MultipleCreateTestJSON-664245217-project-member] Lock "033d667f-5511-4254-a7e2-f8a2a94178d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.845s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1240.412037] env[69992]: DEBUG oslo_vmware.api [req-2fa9d149-6c4b-438e-9bf8-992d13f9890a req-45deace3-8e81-4234-a794-b02d6f51d30b service nova] Task: {'id': task-2897648, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.667170] env[69992]: DEBUG nova.compute.manager [req-d3fa0247-4906-4bee-a89c-dc640b00d5ff req-0992063d-a892-4a40-9f18-6625681e5de2 service nova] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Received event network-changed-24614f86-0f65-4b7b-b425-05b92f02312b {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1240.667170] env[69992]: DEBUG nova.compute.manager [req-d3fa0247-4906-4bee-a89c-dc640b00d5ff req-0992063d-a892-4a40-9f18-6625681e5de2 service nova] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Refreshing instance network info cache due to event network-changed-24614f86-0f65-4b7b-b425-05b92f02312b. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1240.667170] env[69992]: DEBUG oslo_concurrency.lockutils [req-d3fa0247-4906-4bee-a89c-dc640b00d5ff req-0992063d-a892-4a40-9f18-6625681e5de2 service nova] Acquiring lock "refresh_cache-9591b360-414b-4aa9-94b2-5b9ccb9e7d39" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.667170] env[69992]: DEBUG oslo_concurrency.lockutils [req-d3fa0247-4906-4bee-a89c-dc640b00d5ff req-0992063d-a892-4a40-9f18-6625681e5de2 service nova] Acquired lock "refresh_cache-9591b360-414b-4aa9-94b2-5b9ccb9e7d39" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1240.667170] env[69992]: DEBUG nova.network.neutron [req-d3fa0247-4906-4bee-a89c-dc640b00d5ff req-0992063d-a892-4a40-9f18-6625681e5de2 service nova] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Refreshing network info cache for port 24614f86-0f65-4b7b-b425-05b92f02312b {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1240.913073] env[69992]: DEBUG oslo_vmware.api [req-2fa9d149-6c4b-438e-9bf8-992d13f9890a req-45deace3-8e81-4234-a794-b02d6f51d30b service nova] Task: {'id': task-2897648, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.061436] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f297d1f-62cc-4619-9dd7-ccba5cdbfff2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.069532] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2fe40e7-e12c-49bd-9e7b-502621d8079d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.102825] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad105cb-a573-47d4-9780-506737232b4f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.110336] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae6ef425-0bee-4644-aaa9-0c1de47ddc0f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.125059] env[69992]: DEBUG nova.compute.provider_tree [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1241.411535] env[69992]: DEBUG oslo_vmware.api [req-2fa9d149-6c4b-438e-9bf8-992d13f9890a req-45deace3-8e81-4234-a794-b02d6f51d30b service nova] Task: {'id': task-2897648, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.417785] env[69992]: DEBUG nova.network.neutron [req-d3fa0247-4906-4bee-a89c-dc640b00d5ff req-0992063d-a892-4a40-9f18-6625681e5de2 service nova] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Updated VIF entry in instance network info cache for port 24614f86-0f65-4b7b-b425-05b92f02312b. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1241.418175] env[69992]: DEBUG nova.network.neutron [req-d3fa0247-4906-4bee-a89c-dc640b00d5ff req-0992063d-a892-4a40-9f18-6625681e5de2 service nova] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Updating instance_info_cache with network_info: [{"id": "24614f86-0f65-4b7b-b425-05b92f02312b", "address": "fa:16:3e:f1:62:bd", "network": {"id": "643c6aea-6c8f-47f6-91d4-e437a7f1f34f", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1696978642-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8561522a702a48b9ae92d6c4c5de095c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24614f86-0f", "ovs_interfaceid": "24614f86-0f65-4b7b-b425-05b92f02312b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1241.630106] env[69992]: DEBUG nova.scheduler.client.report [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1241.912782] env[69992]: DEBUG oslo_vmware.api [req-2fa9d149-6c4b-438e-9bf8-992d13f9890a req-45deace3-8e81-4234-a794-b02d6f51d30b service nova] Task: {'id': task-2897648, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.920854] env[69992]: DEBUG oslo_concurrency.lockutils [req-d3fa0247-4906-4bee-a89c-dc640b00d5ff req-0992063d-a892-4a40-9f18-6625681e5de2 service nova] Releasing lock "refresh_cache-9591b360-414b-4aa9-94b2-5b9ccb9e7d39" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1242.135993] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.456s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1242.136579] env[69992]: DEBUG nova.compute.manager [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1242.139321] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.461s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1242.140975] env[69992]: INFO nova.compute.claims [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1242.412697] env[69992]: DEBUG oslo_vmware.api [req-2fa9d149-6c4b-438e-9bf8-992d13f9890a req-45deace3-8e81-4234-a794-b02d6f51d30b service nova] Task: {'id': task-2897648, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.646544] env[69992]: DEBUG nova.compute.utils [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1242.650399] env[69992]: DEBUG nova.compute.manager [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1242.650588] env[69992]: DEBUG nova.network.neutron [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1242.711801] env[69992]: DEBUG nova.policy [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1a23fe9c1b114911a0bd8c628708e4ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '24eeae734354423c8b30683ab02b3984', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1242.915160] env[69992]: DEBUG oslo_vmware.api [req-2fa9d149-6c4b-438e-9bf8-992d13f9890a req-45deace3-8e81-4234-a794-b02d6f51d30b service nova] Task: {'id': task-2897648, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.023901] env[69992]: DEBUG nova.network.neutron [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Successfully created port: e7144a56-29c2-4983-b57d-7e1b28077d55 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1243.151752] env[69992]: DEBUG nova.compute.manager [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1243.418739] env[69992]: DEBUG oslo_vmware.api [req-2fa9d149-6c4b-438e-9bf8-992d13f9890a req-45deace3-8e81-4234-a794-b02d6f51d30b service nova] Task: {'id': task-2897648, 'name': ReconfigVM_Task, 'duration_secs': 5.767907} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.419045] env[69992]: DEBUG oslo_concurrency.lockutils [req-2fa9d149-6c4b-438e-9bf8-992d13f9890a req-45deace3-8e81-4234-a794-b02d6f51d30b service nova] Releasing lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1243.420842] env[69992]: DEBUG nova.virt.vmwareapi.vmops [req-2fa9d149-6c4b-438e-9bf8-992d13f9890a req-45deace3-8e81-4234-a794-b02d6f51d30b service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Reconfigured VM to detach interface {{(pid=69992) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1243.420842] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 6.032s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.420842] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.420842] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.420842] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1243.424238] env[69992]: INFO nova.compute.manager [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Terminating instance [ 1243.548193] env[69992]: DEBUG oslo_vmware.rw_handles [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525194af-152a-46b4-6f00-a439dcbb463a/disk-0.vmdk. 
{{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1243.549790] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d2427a-921a-4945-afb2-653318874b4f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.557754] env[69992]: DEBUG oslo_vmware.rw_handles [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525194af-152a-46b4-6f00-a439dcbb463a/disk-0.vmdk is in state: ready. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1243.557934] env[69992]: ERROR oslo_vmware.rw_handles [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525194af-152a-46b4-6f00-a439dcbb463a/disk-0.vmdk due to incomplete transfer. [ 1243.558183] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e4004fdc-906d-4555-8f92-47a7f4500d5f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.564663] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2fdfa1-4a6b-4e69-8881-5db835179960 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.569693] env[69992]: DEBUG oslo_vmware.rw_handles [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525194af-152a-46b4-6f00-a439dcbb463a/disk-0.vmdk. 
{{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1243.569693] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Uploaded image 93d88d09-8938-48a9-89d3-80a51514dcfd to the Glance image server {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1243.570908] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Destroying the VM {{(pid=69992) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1243.571529] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5582609c-9bfb-4893-9a89-de156c73fa9a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.576481] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10bdf16c-d743-415c-9e7d-10baad4e8007 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.580024] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1243.580024] env[69992]: value = "task-2897649" [ 1243.580024] env[69992]: _type = "Task" [ 1243.580024] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.608174] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce11816-f351-485d-a747-6f339dc3ae79 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.613583] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897649, 'name': Destroy_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.618140] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebcc7807-5cef-450e-9197-b01a771cbf73 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.632410] env[69992]: DEBUG nova.compute.provider_tree [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1243.928020] env[69992]: DEBUG nova.compute.manager [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1243.928327] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1243.929271] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b769a9c9-b889-4da5-9cac-0b7fb159c1c1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.936955] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1243.937224] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fb4491b0-8e3e-45f9-bb0a-0dc3049290b7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.943819] env[69992]: DEBUG oslo_vmware.api [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1243.943819] env[69992]: value = "task-2897650" [ 1243.943819] env[69992]: _type = "Task" [ 1243.943819] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.951784] env[69992]: DEBUG oslo_vmware.api [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897650, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.090666] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897649, 'name': Destroy_Task, 'duration_secs': 0.341468} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.091038] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Destroyed the VM [ 1244.091735] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Deleting Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1244.093493] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-df4021ea-82a5-4f8b-8baa-0e609ae47d91 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.099618] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1244.099618] env[69992]: value = "task-2897651" [ 1244.099618] env[69992]: _type = "Task" [ 1244.099618] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.112974] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897651, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.135331] env[69992]: DEBUG nova.scheduler.client.report [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1244.165361] env[69992]: DEBUG nova.compute.manager [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1244.191976] env[69992]: DEBUG nova.virt.hardware [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1244.192251] env[69992]: DEBUG nova.virt.hardware [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1244.192408] env[69992]: DEBUG nova.virt.hardware [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1244.192607] env[69992]: DEBUG nova.virt.hardware [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1244.192758] env[69992]: DEBUG nova.virt.hardware [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1244.192904] env[69992]: DEBUG nova.virt.hardware [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1244.193222] env[69992]: DEBUG nova.virt.hardware [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1244.193405] env[69992]: DEBUG nova.virt.hardware [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1244.193594] env[69992]: DEBUG nova.virt.hardware [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1244.193759] env[69992]: DEBUG nova.virt.hardware [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1244.193956] env[69992]: DEBUG nova.virt.hardware [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1244.195253] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bbc2b05-13f5-4a3e-a902-ea5744e61f3d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.204413] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd2995a-20d2-47d2-a0e0-2b672fa02350 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.458447] env[69992]: DEBUG oslo_vmware.api [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897650, 'name': PowerOffVM_Task, 'duration_secs': 0.359168} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.458893] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1244.459245] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1244.459624] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e1bbceb-7f51-48df-ae53-7a7257dc1b0b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.529062] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1244.529295] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1244.529482] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Deleting the datastore file [datastore1] 5f98a2aa-eb7b-41d2-9e9f-14cee9445942 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1244.530806] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6984b390-69b8-47ca-a78b-a0ebeb06d24c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.537363] env[69992]: DEBUG oslo_vmware.api [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1244.537363] env[69992]: value = "task-2897653" [ 1244.537363] env[69992]: _type = "Task" [ 1244.537363] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.548498] env[69992]: DEBUG oslo_vmware.api [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897653, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.609740] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897651, 'name': RemoveSnapshot_Task, 'duration_secs': 0.367839} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.610135] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Deleted Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1244.610454] env[69992]: DEBUG nova.compute.manager [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1244.611316] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6a10a52-1995-464d-96f3-6ef61e2eb4f0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.640316] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.501s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.640842] env[69992]: DEBUG nova.compute.manager [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1244.643711] env[69992]: DEBUG oslo_concurrency.lockutils [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.458s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1244.643950] env[69992]: DEBUG nova.objects.instance [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Lazy-loading 'resources' on Instance uuid 7fc7c481-75e8-40f2-a971-752ce6dde59b {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1244.772140] env[69992]: DEBUG nova.network.neutron [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Successfully updated port: e7144a56-29c2-4983-b57d-7e1b28077d55 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1244.808211] env[69992]: DEBUG nova.compute.manager [req-6565f2a5-1dbe-4acd-af9d-242601b38b74 req-bdf9e4ed-7a0c-4228-915c-f34ff87ca3be service nova] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Received event network-vif-plugged-e7144a56-29c2-4983-b57d-7e1b28077d55 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1244.808211] env[69992]: DEBUG oslo_concurrency.lockutils [req-6565f2a5-1dbe-4acd-af9d-242601b38b74 req-bdf9e4ed-7a0c-4228-915c-f34ff87ca3be service nova] Acquiring lock "7fa33d98-20b7-4162-a354-24cfea17701f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1244.808211] env[69992]: DEBUG oslo_concurrency.lockutils [req-6565f2a5-1dbe-4acd-af9d-242601b38b74 req-bdf9e4ed-7a0c-4228-915c-f34ff87ca3be service nova] Lock "7fa33d98-20b7-4162-a354-24cfea17701f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1244.808211] env[69992]: DEBUG oslo_concurrency.lockutils [req-6565f2a5-1dbe-4acd-af9d-242601b38b74 req-bdf9e4ed-7a0c-4228-915c-f34ff87ca3be service nova] Lock "7fa33d98-20b7-4162-a354-24cfea17701f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.808211] env[69992]: DEBUG nova.compute.manager [req-6565f2a5-1dbe-4acd-af9d-242601b38b74 req-bdf9e4ed-7a0c-4228-915c-f34ff87ca3be service nova] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] No waiting events found dispatching network-vif-plugged-e7144a56-29c2-4983-b57d-7e1b28077d55 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1244.808211] env[69992]: WARNING nova.compute.manager [req-6565f2a5-1dbe-4acd-af9d-242601b38b74 req-bdf9e4ed-7a0c-4228-915c-f34ff87ca3be service nova] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Received unexpected event network-vif-plugged-e7144a56-29c2-4983-b57d-7e1b28077d55 for instance with vm_state building and 
task_state spawning. [ 1245.048754] env[69992]: DEBUG oslo_vmware.api [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897653, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.182241} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.049025] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1245.049204] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1245.049384] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1245.049554] env[69992]: INFO nova.compute.manager [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1245.049793] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1245.049979] env[69992]: DEBUG nova.compute.manager [-] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1245.050107] env[69992]: DEBUG nova.network.neutron [-] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1245.125941] env[69992]: INFO nova.compute.manager [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Shelve offloading [ 1245.147810] env[69992]: DEBUG nova.compute.utils [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1245.152672] env[69992]: DEBUG nova.compute.manager [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1245.152858] env[69992]: DEBUG nova.network.neutron [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1245.219859] env[69992]: DEBUG nova.policy [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8380e6e9bb87424793504916dbc01790', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '34923aa9da1d46cc9d22d569d9428781', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1245.276958] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Acquiring lock "refresh_cache-7fa33d98-20b7-4162-a354-24cfea17701f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1245.276958] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Acquired lock "refresh_cache-7fa33d98-20b7-4162-a354-24cfea17701f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1245.276958] env[69992]: DEBUG nova.network.neutron [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e 
tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1245.567290] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57c6c43-0adb-4d33-911e-e6f0aacd0983 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.577279] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7f11c1-c611-4af6-8832-0ceca20a7b57 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.615870] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe5fd7d-0f67-4a85-b70a-13c2a6bdd22a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.628389] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d595af3e-13af-4b0d-a76a-fa01022be97e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.632495] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1245.633199] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-71d53f56-2449-46cd-a02a-e732a3d878fc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.645291] env[69992]: DEBUG nova.compute.provider_tree [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1245.652269] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1245.652269] env[69992]: value = "task-2897654" [ 1245.652269] env[69992]: _type = "Task" [ 1245.652269] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.656788] env[69992]: DEBUG nova.compute.manager [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1245.667154] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] VM already powered off {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1245.667154] env[69992]: DEBUG nova.compute.manager [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1245.667913] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7561639c-a0a6-48f5-ab12-054162e56b69 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.677818] env[69992]: DEBUG oslo_concurrency.lockutils [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "refresh_cache-c4bd5585-d917-4d92-9ce8-fa1950944f25" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1245.679583] env[69992]: DEBUG oslo_concurrency.lockutils [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired lock "refresh_cache-c4bd5585-d917-4d92-9ce8-fa1950944f25" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1245.679583] env[69992]: DEBUG nova.network.neutron [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1245.788960] env[69992]: DEBUG nova.network.neutron [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Successfully created port: 942293fd-c866-4331-b9d4-f667536a039b {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1245.833937] env[69992]: DEBUG nova.network.neutron [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1246.060020] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Acquiring lock "ae681491-c03e-486f-b763-0ebfa4dcd669" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1246.060020] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Lock "ae681491-c03e-486f-b763-0ebfa4dcd669" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1246.064835] env[69992]: DEBUG nova.network.neutron [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Updating instance_info_cache with network_info: [{"id": "e7144a56-29c2-4983-b57d-7e1b28077d55", "address": "fa:16:3e:6e:ef:74", "network": {"id": "ff21bc96-c4d9-426c-aca0-d7cc99d03e86", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-484248836-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "24eeae734354423c8b30683ab02b3984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f1b07b1-e4e5-4842-9090-07fb2c3e124b", "external-id": "nsx-vlan-transportzone-646", "segmentation_id": 646, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7144a56-29", "ovs_interfaceid": "e7144a56-29c2-4983-b57d-7e1b28077d55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.152881] env[69992]: DEBUG nova.scheduler.client.report [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1246.268789] env[69992]: DEBUG nova.network.neutron [-] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Updating instance_info_cache with network_info: [] {{(pid=69992) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.389208] env[69992]: DEBUG nova.network.neutron [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Updating instance_info_cache with network_info: [{"id": "445cdcf8-38ea-4465-a568-4f4e63c483dd", "address": "fa:16:3e:3d:53:32", "network": {"id": "ef950ccf-7246-4fba-b1d2-5829e51d7f7d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093076994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b8716c4b7324052a3472734c655655a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap445cdcf8-38", "ovs_interfaceid": "445cdcf8-38ea-4465-a568-4f4e63c483dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.426096] env[69992]: DEBUG oslo_concurrency.lockutils [None req-21e68335-26d0-4cd9-9096-64322fdf853e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "1b4da2ab-d026-45d8-8234-79ddd84d5cbb" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1246.426387] env[69992]: DEBUG oslo_concurrency.lockutils [None req-21e68335-26d0-4cd9-9096-64322fdf853e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "1b4da2ab-d026-45d8-8234-79ddd84d5cbb" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1246.562226] env[69992]: DEBUG nova.compute.manager [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1246.567740] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Releasing lock "refresh_cache-7fa33d98-20b7-4162-a354-24cfea17701f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1246.571019] env[69992]: DEBUG nova.compute.manager [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Instance network_info: |[{"id": "e7144a56-29c2-4983-b57d-7e1b28077d55", "address": "fa:16:3e:6e:ef:74", "network": {"id": "ff21bc96-c4d9-426c-aca0-d7cc99d03e86", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-484248836-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "24eeae734354423c8b30683ab02b3984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f1b07b1-e4e5-4842-9090-07fb2c3e124b", "external-id": "nsx-vlan-transportzone-646", "segmentation_id": 646, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7144a56-29", "ovs_interfaceid": "e7144a56-29c2-4983-b57d-7e1b28077d55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1246.571019] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:ef:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6f1b07b1-e4e5-4842-9090-07fb2c3e124b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e7144a56-29c2-4983-b57d-7e1b28077d55', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1246.576527] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Creating folder: Project (24eeae734354423c8b30683ab02b3984). Parent ref: group-v581821. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1246.578303] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-431b12eb-a41d-451f-af2a-699d31d81f7d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.591014] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Created folder: Project (24eeae734354423c8b30683ab02b3984) in parent group-v581821. [ 1246.591226] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Creating folder: Instances. Parent ref: group-v582051. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1246.591473] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db50e149-191a-4772-b51c-4dc98774c98f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.607020] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Created folder: Instances in parent group-v582051. [ 1246.607020] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1246.607020] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1246.607020] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fe8ddaab-d82f-45b4-84ff-603042d35fc7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.624856] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1246.624856] env[69992]: value = "task-2897657" [ 1246.624856] env[69992]: _type = "Task" [ 1246.624856] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.634402] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897657, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.658544] env[69992]: DEBUG oslo_concurrency.lockutils [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.015s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1246.660857] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.600s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1246.662530] env[69992]: INFO nova.compute.claims [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1246.668218] env[69992]: DEBUG nova.compute.manager [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1246.686160] env[69992]: INFO nova.scheduler.client.report [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Deleted allocations for instance 7fc7c481-75e8-40f2-a971-752ce6dde59b [ 1246.710760] env[69992]: DEBUG nova.virt.hardware [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1246.711075] env[69992]: DEBUG nova.virt.hardware [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1246.711269] env[69992]: DEBUG nova.virt.hardware [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 
tempest-VolumesAdminNegativeTest-1577344158-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1246.711462] env[69992]: DEBUG nova.virt.hardware [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1246.711612] env[69992]: DEBUG nova.virt.hardware [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1246.711762] env[69992]: DEBUG nova.virt.hardware [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1246.712232] env[69992]: DEBUG nova.virt.hardware [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1246.712458] env[69992]: DEBUG nova.virt.hardware [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1246.712650] env[69992]: DEBUG nova.virt.hardware [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1246.712820] env[69992]: DEBUG nova.virt.hardware [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1246.712998] env[69992]: DEBUG nova.virt.hardware [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1246.713936] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6003eee5-5cbb-450d-99f5-f9fafe281ef2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.723756] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb8a108-dd5b-480c-9d3e-e7accba3e3aa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.771966] env[69992]: INFO nova.compute.manager [-] 
[instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Took 1.72 seconds to deallocate network for instance. [ 1246.841488] env[69992]: DEBUG nova.compute.manager [req-a15de50c-c035-4e05-ad3e-bb2eaf1f2f88 req-2b9fd05a-4a66-4949-bbde-a49dcd202b26 service nova] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Received event network-changed-e7144a56-29c2-4983-b57d-7e1b28077d55 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1246.841742] env[69992]: DEBUG nova.compute.manager [req-a15de50c-c035-4e05-ad3e-bb2eaf1f2f88 req-2b9fd05a-4a66-4949-bbde-a49dcd202b26 service nova] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Refreshing instance network info cache due to event network-changed-e7144a56-29c2-4983-b57d-7e1b28077d55. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1246.841945] env[69992]: DEBUG oslo_concurrency.lockutils [req-a15de50c-c035-4e05-ad3e-bb2eaf1f2f88 req-2b9fd05a-4a66-4949-bbde-a49dcd202b26 service nova] Acquiring lock "refresh_cache-7fa33d98-20b7-4162-a354-24cfea17701f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.842119] env[69992]: DEBUG oslo_concurrency.lockutils [req-a15de50c-c035-4e05-ad3e-bb2eaf1f2f88 req-2b9fd05a-4a66-4949-bbde-a49dcd202b26 service nova] Acquired lock "refresh_cache-7fa33d98-20b7-4162-a354-24cfea17701f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1246.842291] env[69992]: DEBUG nova.network.neutron [req-a15de50c-c035-4e05-ad3e-bb2eaf1f2f88 req-2b9fd05a-4a66-4949-bbde-a49dcd202b26 service nova] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Refreshing network info cache for port e7144a56-29c2-4983-b57d-7e1b28077d55 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1246.896996] env[69992]: DEBUG oslo_concurrency.lockutils [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Releasing lock "refresh_cache-c4bd5585-d917-4d92-9ce8-fa1950944f25" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1246.930935] env[69992]: INFO nova.compute.manager [None req-21e68335-26d0-4cd9-9096-64322fdf853e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Detaching volume 78d5f802-a2b3-4c3d-9484-3ea2397e9ab5 [ 1246.973433] env[69992]: INFO nova.virt.block_device [None req-21e68335-26d0-4cd9-9096-64322fdf853e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Attempting to driver detach volume 78d5f802-a2b3-4c3d-9484-3ea2397e9ab5 from mountpoint /dev/sdb [ 1246.973671] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-21e68335-26d0-4cd9-9096-64322fdf853e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Volume detach. 
Driver type: vmdk {{(pid=69992) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1246.973858] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-21e68335-26d0-4cd9-9096-64322fdf853e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582033', 'volume_id': '78d5f802-a2b3-4c3d-9484-3ea2397e9ab5', 'name': 'volume-78d5f802-a2b3-4c3d-9484-3ea2397e9ab5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1b4da2ab-d026-45d8-8234-79ddd84d5cbb', 'attached_at': '', 'detached_at': '', 'volume_id': '78d5f802-a2b3-4c3d-9484-3ea2397e9ab5', 'serial': '78d5f802-a2b3-4c3d-9484-3ea2397e9ab5'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1246.975080] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d911b79b-890c-4921-9411-849a907c0c20 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.999646] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd6148d-0a56-4f87-90de-cbc8718ba146 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.006701] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4215e821-9782-42be-a6eb-50c100aad209 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.026090] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb0114f-0f54-45f0-ab80-599ffad851da {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.039950] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-21e68335-26d0-4cd9-9096-64322fdf853e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] The volume has not been displaced from its original location: [datastore2] volume-78d5f802-a2b3-4c3d-9484-3ea2397e9ab5/volume-78d5f802-a2b3-4c3d-9484-3ea2397e9ab5.vmdk. No consolidation needed. 
{{(pid=69992) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1247.045234] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-21e68335-26d0-4cd9-9096-64322fdf853e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Reconfiguring VM instance instance-00000037 to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1247.045522] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9160895e-2510-4582-8b02-43a123bcd63e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.063492] env[69992]: DEBUG oslo_vmware.api [None req-21e68335-26d0-4cd9-9096-64322fdf853e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1247.063492] env[69992]: value = "task-2897658" [ 1247.063492] env[69992]: _type = "Task" [ 1247.063492] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.074447] env[69992]: DEBUG oslo_vmware.api [None req-21e68335-26d0-4cd9-9096-64322fdf853e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897658, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.087191] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1247.136872] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897657, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.189685] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1247.190627] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43f937c-7a32-46bf-b164-5bedee12f502 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.196144] env[69992]: DEBUG oslo_concurrency.lockutils [None req-af4d18d5-121d-42d8-a26b-4bfb209ebe21 tempest-ServersTestFqdnHostnames-1300927320 tempest-ServersTestFqdnHostnames-1300927320-project-member] Lock "7fc7c481-75e8-40f2-a971-752ce6dde59b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 37.385s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1247.200345] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1247.201017] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2e8f0c52-16a8-480b-aff5-593d71d1936a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.278675] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1247.460475] env[69992]: DEBUG nova.network.neutron [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Successfully updated port: 942293fd-c866-4331-b9d4-f667536a039b {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1247.568846] env[69992]: DEBUG nova.network.neutron [req-a15de50c-c035-4e05-ad3e-bb2eaf1f2f88 req-2b9fd05a-4a66-4949-bbde-a49dcd202b26 service nova] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Updated VIF entry in instance network info cache for port e7144a56-29c2-4983-b57d-7e1b28077d55. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1247.569254] env[69992]: DEBUG nova.network.neutron [req-a15de50c-c035-4e05-ad3e-bb2eaf1f2f88 req-2b9fd05a-4a66-4949-bbde-a49dcd202b26 service nova] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Updating instance_info_cache with network_info: [{"id": "e7144a56-29c2-4983-b57d-7e1b28077d55", "address": "fa:16:3e:6e:ef:74", "network": {"id": "ff21bc96-c4d9-426c-aca0-d7cc99d03e86", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-484248836-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "24eeae734354423c8b30683ab02b3984", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f1b07b1-e4e5-4842-9090-07fb2c3e124b", "external-id": "nsx-vlan-transportzone-646", "segmentation_id": 646, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7144a56-29", "ovs_interfaceid": "e7144a56-29c2-4983-b57d-7e1b28077d55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.576359] env[69992]: DEBUG oslo_vmware.api [None req-21e68335-26d0-4cd9-9096-64322fdf853e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897658, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.635908] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897657, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.716579] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1247.716579] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1247.716579] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Deleting the datastore file [datastore1] c4bd5585-d917-4d92-9ce8-fa1950944f25 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1247.716761] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85d8bde1-a1fb-4dd4-97a6-8b5f0ff184b5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.723358] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1247.723358] env[69992]: value = "task-2897660" [ 1247.723358] env[69992]: _type = "Task" [ 1247.723358] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.731637] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897660, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.968066] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquiring lock "refresh_cache-57702674-4c96-4577-a93f-24ecffebb3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1247.968066] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquired lock "refresh_cache-57702674-4c96-4577-a93f-24ecffebb3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1247.968164] env[69992]: DEBUG nova.network.neutron [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1247.999610] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d29c8d8-abd8-495b-b57c-10dbc9663b39 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.009087] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f54b8112-ccf1-4c5c-93ec-b5040ec0b26f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.043280] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f003fe63-3134-4431-899c-2289f8f4d332 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.052156] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe44b3d-75f1-4649-94df-9993097ab08c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.068166] env[69992]: DEBUG nova.compute.provider_tree [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1248.072298] env[69992]: DEBUG oslo_concurrency.lockutils [req-a15de50c-c035-4e05-ad3e-bb2eaf1f2f88 req-2b9fd05a-4a66-4949-bbde-a49dcd202b26 service nova] Releasing lock "refresh_cache-7fa33d98-20b7-4162-a354-24cfea17701f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1248.072529] env[69992]: DEBUG nova.compute.manager [req-a15de50c-c035-4e05-ad3e-bb2eaf1f2f88 req-2b9fd05a-4a66-4949-bbde-a49dcd202b26 service nova] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Received event network-vif-deleted-48ef557e-b0bc-4415-84c9-60b9146b4ff7 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1248.077741] env[69992]: DEBUG oslo_vmware.api [None req-21e68335-26d0-4cd9-9096-64322fdf853e tempest-ServerRescueNegativeTestJSON-2055163366 
tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897658, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.136984] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897657, 'name': CreateVM_Task, 'duration_secs': 1.38968} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.137099] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1248.137736] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.137904] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1248.138258] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1248.138518] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47ab4663-a262-4840-82da-bdef13d9c83a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.142954] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Waiting for the task: (returnval){ [ 1248.142954] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5263b308-956c-d9cf-c83f-b51472e3ed93" [ 1248.142954] env[69992]: _type = "Task" [ 1248.142954] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.151089] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5263b308-956c-d9cf-c83f-b51472e3ed93, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.232553] env[69992]: DEBUG oslo_vmware.api [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897660, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151863} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.232868] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1248.232982] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1248.233177] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1248.254130] env[69992]: INFO nova.scheduler.client.report [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Deleted allocations for instance c4bd5585-d917-4d92-9ce8-fa1950944f25 [ 1248.507111] env[69992]: DEBUG nova.network.neutron [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1248.573665] env[69992]: DEBUG nova.scheduler.client.report [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1248.579894] env[69992]: DEBUG oslo_vmware.api [None req-21e68335-26d0-4cd9-9096-64322fdf853e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897658, 'name': ReconfigVM_Task, 'duration_secs': 1.021948} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.580385] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-21e68335-26d0-4cd9-9096-64322fdf853e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Reconfigured VM instance instance-00000037 to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1248.585058] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c85d1730-355f-4532-bb49-d6fcbedd0387 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.601691] env[69992]: DEBUG oslo_vmware.api [None req-21e68335-26d0-4cd9-9096-64322fdf853e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1248.601691] env[69992]: value = "task-2897661" [ 1248.601691] env[69992]: _type = "Task" [ 1248.601691] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.613202] env[69992]: DEBUG oslo_vmware.api [None req-21e68335-26d0-4cd9-9096-64322fdf853e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897661, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.653746] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5263b308-956c-d9cf-c83f-b51472e3ed93, 'name': SearchDatastore_Task, 'duration_secs': 0.009608} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.654061] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1248.654307] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1248.654550] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.654695] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1248.654875] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1248.655155] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aaede261-8898-4a53-96b6-fbeded8a2d7e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.665126] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1248.665126] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1248.665126] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01a2be92-abb6-4b05-b3c1-127ab61063b1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.670189] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Waiting for the task: (returnval){ [ 1248.670189] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52234e1d-f558-2cdf-7320-c610018e1d37" [ 1248.670189] env[69992]: _type = "Task" [ 1248.670189] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.679434] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52234e1d-f558-2cdf-7320-c610018e1d37, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.718037] env[69992]: DEBUG nova.network.neutron [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Updating instance_info_cache with network_info: [{"id": "942293fd-c866-4331-b9d4-f667536a039b", "address": "fa:16:3e:3c:2b:0e", "network": {"id": "d02e51d7-72be-4f6c-aa90-0ada60f8d7f7", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1603378574-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34923aa9da1d46cc9d22d569d9428781", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap942293fd-c8", "ovs_interfaceid": "942293fd-c866-4331-b9d4-f667536a039b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1248.758312] env[69992]: DEBUG oslo_concurrency.lockutils [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1249.074528] env[69992]: DEBUG nova.compute.manager [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] [instance: 
c4bd5585-d917-4d92-9ce8-fa1950944f25] Received event network-vif-unplugged-445cdcf8-38ea-4465-a568-4f4e63c483dd {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1249.074677] env[69992]: DEBUG oslo_concurrency.lockutils [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] Acquiring lock "c4bd5585-d917-4d92-9ce8-fa1950944f25-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1249.074883] env[69992]: DEBUG oslo_concurrency.lockutils [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] Lock "c4bd5585-d917-4d92-9ce8-fa1950944f25-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1249.075177] env[69992]: DEBUG oslo_concurrency.lockutils [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] Lock "c4bd5585-d917-4d92-9ce8-fa1950944f25-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1249.075368] env[69992]: DEBUG nova.compute.manager [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] No waiting events found dispatching network-vif-unplugged-445cdcf8-38ea-4465-a568-4f4e63c483dd {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1249.075529] env[69992]: DEBUG nova.compute.manager [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Received event network-vif-unplugged-445cdcf8-38ea-4465-a568-4f4e63c483dd for instance with task_state deleting. {{(pid=69992) _process_instance_event /opt/stack/nova/nova/compute/manager.py:11515}} [ 1249.075688] env[69992]: DEBUG nova.compute.manager [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Received event network-changed-445cdcf8-38ea-4465-a568-4f4e63c483dd {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1249.078073] env[69992]: DEBUG nova.compute.manager [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Refreshing instance network info cache due to event network-changed-445cdcf8-38ea-4465-a568-4f4e63c483dd. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1249.078073] env[69992]: DEBUG oslo_concurrency.lockutils [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] Acquiring lock "refresh_cache-c4bd5585-d917-4d92-9ce8-fa1950944f25" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1249.078073] env[69992]: DEBUG oslo_concurrency.lockutils [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] Acquired lock "refresh_cache-c4bd5585-d917-4d92-9ce8-fa1950944f25" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1249.078073] env[69992]: DEBUG nova.network.neutron [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Refreshing network info cache for port 445cdcf8-38ea-4465-a568-4f4e63c483dd {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1249.081496] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.421s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1249.081956] env[69992]: DEBUG nova.compute.manager [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1249.086946] env[69992]: DEBUG oslo_concurrency.lockutils [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.753s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1249.088343] env[69992]: INFO nova.compute.claims [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1249.113740] env[69992]: DEBUG oslo_vmware.api [None req-21e68335-26d0-4cd9-9096-64322fdf853e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897661, 'name': ReconfigVM_Task, 'duration_secs': 0.134876} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.114099] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-21e68335-26d0-4cd9-9096-64322fdf853e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582033', 'volume_id': '78d5f802-a2b3-4c3d-9484-3ea2397e9ab5', 'name': 'volume-78d5f802-a2b3-4c3d-9484-3ea2397e9ab5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1b4da2ab-d026-45d8-8234-79ddd84d5cbb', 'attached_at': '', 'detached_at': '', 'volume_id': '78d5f802-a2b3-4c3d-9484-3ea2397e9ab5', 'serial': '78d5f802-a2b3-4c3d-9484-3ea2397e9ab5'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1249.182487] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52234e1d-f558-2cdf-7320-c610018e1d37, 'name': SearchDatastore_Task, 'duration_secs': 0.008293} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.183349] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b1636ec-1740-4cca-a5eb-dcb3f2f8271c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.188996] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Waiting for the task: (returnval){ [ 1249.188996] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52fee6d7-f170-29b6-85f6-cf4337023b19" [ 1249.188996] env[69992]: _type = "Task" [ 1249.188996] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.197975] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52fee6d7-f170-29b6-85f6-cf4337023b19, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.220842] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Releasing lock "refresh_cache-57702674-4c96-4577-a93f-24ecffebb3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1249.221252] env[69992]: DEBUG nova.compute.manager [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Instance network_info: |[{"id": "942293fd-c866-4331-b9d4-f667536a039b", "address": "fa:16:3e:3c:2b:0e", "network": {"id": "d02e51d7-72be-4f6c-aa90-0ada60f8d7f7", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1603378574-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34923aa9da1d46cc9d22d569d9428781", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap942293fd-c8", "ovs_interfaceid": "942293fd-c866-4331-b9d4-f667536a039b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1249.222032] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:2b:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11da2092-76f7-447e-babb-8fc14ad39a71', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '942293fd-c866-4331-b9d4-f667536a039b', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1249.233015] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Creating folder: Project (34923aa9da1d46cc9d22d569d9428781). Parent ref: group-v581821. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1249.233015] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-25d140c4-e0e1-42a7-aca5-bad3db3ba3c0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.241562] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Created folder: Project (34923aa9da1d46cc9d22d569d9428781) in parent group-v581821. [ 1249.242390] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Creating folder: Instances. Parent ref: group-v582054. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1249.242390] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-444c588e-f8c9-4084-816c-be622789ee92 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.251253] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Created folder: Instances in parent group-v582054. [ 1249.251503] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1249.251696] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1249.251933] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8668e3a7-dc9b-44bd-b78a-d0942abf1b5f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.272610] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1249.272610] env[69992]: value = "task-2897664" [ 1249.272610] env[69992]: _type = "Task" [ 1249.272610] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.280790] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897664, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.419601] env[69992]: DEBUG oslo_concurrency.lockutils [None req-13b98d8b-21d7-4648-b10d-ae3f769c110d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "c4bd5585-d917-4d92-9ce8-fa1950944f25" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1249.589042] env[69992]: DEBUG nova.compute.utils [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1249.590534] env[69992]: DEBUG nova.compute.manager [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1249.590718] env[69992]: DEBUG nova.network.neutron [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1249.677821] env[69992]: DEBUG nova.objects.instance [None req-21e68335-26d0-4cd9-9096-64322fdf853e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lazy-loading 'flavor' on Instance uuid 1b4da2ab-d026-45d8-8234-79ddd84d5cbb {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1249.685229] env[69992]: DEBUG nova.policy [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '044902c6075d41739188628ba5ebd58d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b51b8195c4e7418cbdaa66aa5e5aff5b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1249.705929] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52fee6d7-f170-29b6-85f6-cf4337023b19, 'name': SearchDatastore_Task, 'duration_secs': 0.010007} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.706774] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1249.707093] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 7fa33d98-20b7-4162-a354-24cfea17701f/7fa33d98-20b7-4162-a354-24cfea17701f.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1249.707644] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f491b56d-b7d4-4757-9133-756d504ef181 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.718128] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Waiting for the task: (returnval){ [ 1249.718128] env[69992]: value = "task-2897665" [ 1249.718128] env[69992]: _type = "Task" [ 1249.718128] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.727441] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Task: {'id': task-2897665, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.784099] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897664, 'name': CreateVM_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.914681] env[69992]: DEBUG nova.network.neutron [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Updated VIF entry in instance network info cache for port 445cdcf8-38ea-4465-a568-4f4e63c483dd. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1249.915100] env[69992]: DEBUG nova.network.neutron [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Updating instance_info_cache with network_info: [{"id": "445cdcf8-38ea-4465-a568-4f4e63c483dd", "address": "fa:16:3e:3d:53:32", "network": {"id": "ef950ccf-7246-4fba-b1d2-5829e51d7f7d", "bridge": null, "label": "tempest-DeleteServersTestJSON-1093076994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b8716c4b7324052a3472734c655655a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap445cdcf8-38", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.081661] env[69992]: DEBUG nova.network.neutron [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Successfully created port: 15455be6-d2df-46a9-bd15-7872eadb1ab6 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1250.094596] env[69992]: DEBUG nova.compute.manager [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1250.230405] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Task: {'id': task-2897665, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.435982} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.233063] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 7fa33d98-20b7-4162-a354-24cfea17701f/7fa33d98-20b7-4162-a354-24cfea17701f.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1250.233340] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1250.234071] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6a700ab1-c277-4de3-98ce-1b7df2c52897 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.244108] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Waiting for the task: (returnval){ [ 1250.244108] env[69992]: value = "task-2897666" [ 1250.244108] env[69992]: _type = "Task" [ 1250.244108] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.256566] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Task: {'id': task-2897666, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.295362] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897664, 'name': CreateVM_Task, 'duration_secs': 0.726196} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.295362] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1250.295362] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.295362] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1250.295362] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1250.297045] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5418964-6c8b-4757-b979-ae46e02cb0f5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.302038] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1250.302038] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5211fb07-44ab-0b5a-31fe-82888d70517d" [ 1250.302038] env[69992]: _type = "Task" [ 1250.302038] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.312254] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5211fb07-44ab-0b5a-31fe-82888d70517d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.420653] env[69992]: DEBUG oslo_concurrency.lockutils [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] Releasing lock "refresh_cache-c4bd5585-d917-4d92-9ce8-fa1950944f25" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1250.421237] env[69992]: DEBUG nova.compute.manager [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Received event network-vif-plugged-942293fd-c866-4331-b9d4-f667536a039b {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1250.421237] env[69992]: DEBUG oslo_concurrency.lockutils [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] Acquiring lock "57702674-4c96-4577-a93f-24ecffebb3a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1250.421329] env[69992]: DEBUG oslo_concurrency.lockutils [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] Lock "57702674-4c96-4577-a93f-24ecffebb3a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1250.421447] env[69992]: DEBUG oslo_concurrency.lockutils [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] Lock "57702674-4c96-4577-a93f-24ecffebb3a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1250.421605] env[69992]: DEBUG nova.compute.manager [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] No waiting events found dispatching network-vif-plugged-942293fd-c866-4331-b9d4-f667536a039b {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1250.422230] env[69992]: WARNING nova.compute.manager [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Received unexpected event network-vif-plugged-942293fd-c866-4331-b9d4-f667536a039b for instance with vm_state building and task_state spawning. [ 1250.422230] env[69992]: DEBUG nova.compute.manager [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Received event network-changed-942293fd-c866-4331-b9d4-f667536a039b {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1250.422230] env[69992]: DEBUG nova.compute.manager [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Refreshing instance network info cache due to event network-changed-942293fd-c866-4331-b9d4-f667536a039b. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1250.422399] env[69992]: DEBUG oslo_concurrency.lockutils [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] Acquiring lock "refresh_cache-57702674-4c96-4577-a93f-24ecffebb3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.422399] env[69992]: DEBUG oslo_concurrency.lockutils [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] Acquired lock "refresh_cache-57702674-4c96-4577-a93f-24ecffebb3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1250.423533] env[69992]: DEBUG nova.network.neutron [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Refreshing network info cache for port 942293fd-c866-4331-b9d4-f667536a039b {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1250.478980] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebfe6af6-69bd-4a21-a4a1-6ebffecaf554 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.488460] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc8903a-d24a-437b-bc05-362586e24e32 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.519613] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-935b109b-29e1-47c0-b55f-e59d409b72aa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.527658] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64872367-2873-4d1c-93fd-1b18c60cec6d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.540943] env[69992]: DEBUG nova.compute.provider_tree [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1250.690224] env[69992]: DEBUG oslo_concurrency.lockutils [None req-21e68335-26d0-4cd9-9096-64322fdf853e tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "1b4da2ab-d026-45d8-8234-79ddd84d5cbb" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.264s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1250.756929] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Task: {'id': task-2897666, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064098} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.757202] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1250.758012] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44032c99-5578-4309-8ec3-f900ff3aab41 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.781701] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 7fa33d98-20b7-4162-a354-24cfea17701f/7fa33d98-20b7-4162-a354-24cfea17701f.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1250.781976] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-696999c5-1885-44d6-bded-ed5d1269f9d8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.801872] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Waiting for the task: (returnval){ [ 1250.801872] env[69992]: value = "task-2897667" [ 1250.801872] env[69992]: _type = "Task" [ 1250.801872] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.812273] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Task: {'id': task-2897667, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.816083] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5211fb07-44ab-0b5a-31fe-82888d70517d, 'name': SearchDatastore_Task, 'duration_secs': 0.009319} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.816365] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1250.816599] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1250.816831] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.816977] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1250.817168] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1250.817436] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0ee9720-9154-4cf3-a433-cfaeb4f70198 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.825846] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1250.826039] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1250.826738] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-587680ce-a63b-4223-8291-004dbd90eb8f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.831465] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1250.831465] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52b2dc71-050f-7312-d937-02dbf1575fd7" [ 1250.831465] env[69992]: _type = "Task" [ 1250.831465] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.838839] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b2dc71-050f-7312-d937-02dbf1575fd7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.044463] env[69992]: DEBUG nova.scheduler.client.report [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1251.104444] env[69992]: DEBUG nova.compute.manager [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1251.121871] env[69992]: DEBUG nova.network.neutron [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Updated VIF entry in instance network info cache for port 942293fd-c866-4331-b9d4-f667536a039b. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1251.122250] env[69992]: DEBUG nova.network.neutron [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Updating instance_info_cache with network_info: [{"id": "942293fd-c866-4331-b9d4-f667536a039b", "address": "fa:16:3e:3c:2b:0e", "network": {"id": "d02e51d7-72be-4f6c-aa90-0ada60f8d7f7", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1603378574-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34923aa9da1d46cc9d22d569d9428781", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap942293fd-c8", "ovs_interfaceid": "942293fd-c866-4331-b9d4-f667536a039b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1251.130837] env[69992]: DEBUG nova.virt.hardware [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1251.130967] env[69992]: DEBUG nova.virt.hardware [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1251.131105] env[69992]: DEBUG nova.virt.hardware [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1251.131370] env[69992]: DEBUG nova.virt.hardware [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Flavor pref 0:0:0 {{(pid=69992) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1251.131532] env[69992]: DEBUG nova.virt.hardware [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1251.131681] env[69992]: DEBUG nova.virt.hardware [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1251.131883] env[69992]: DEBUG nova.virt.hardware [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1251.132055] env[69992]: DEBUG nova.virt.hardware [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1251.132228] env[69992]: DEBUG nova.virt.hardware [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1251.132394] env[69992]: DEBUG nova.virt.hardware [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1251.132580] env[69992]: DEBUG nova.virt.hardware [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1251.133423] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4c0afb-29bd-4edd-942c-565dc6fefdb0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.142190] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c0af82-448f-4f49-ac20-6c16b6acbb08 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.187666] env[69992]: DEBUG oslo_concurrency.lockutils [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "1b4da2ab-d026-45d8-8234-79ddd84d5cbb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
1251.188040] env[69992]: DEBUG oslo_concurrency.lockutils [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "1b4da2ab-d026-45d8-8234-79ddd84d5cbb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.188288] env[69992]: DEBUG oslo_concurrency.lockutils [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "1b4da2ab-d026-45d8-8234-79ddd84d5cbb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1251.188733] env[69992]: DEBUG oslo_concurrency.lockutils [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "1b4da2ab-d026-45d8-8234-79ddd84d5cbb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.188733] env[69992]: DEBUG oslo_concurrency.lockutils [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "1b4da2ab-d026-45d8-8234-79ddd84d5cbb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.190863] env[69992]: INFO nova.compute.manager [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Terminating instance [ 1251.313917] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Task: {'id': task-2897667, 'name': ReconfigVM_Task, 'duration_secs': 0.270795} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.314230] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 7fa33d98-20b7-4162-a354-24cfea17701f/7fa33d98-20b7-4162-a354-24cfea17701f.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1251.314881] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3614d4d6-c5e4-4cb7-9fea-f378a4b46552 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.322355] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Waiting for the task: (returnval){ [ 1251.322355] env[69992]: value = "task-2897668" [ 1251.322355] env[69992]: _type = "Task" [ 1251.322355] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.334820] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Task: {'id': task-2897668, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.346238] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b2dc71-050f-7312-d937-02dbf1575fd7, 'name': SearchDatastore_Task, 'duration_secs': 0.009678} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.347365] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94e94dca-ddb0-4b22-a8e3-58c640cc6c6a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.353638] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1251.353638] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]524c84b0-b766-7508-b3da-55c803262487" [ 1251.353638] env[69992]: _type = "Task" [ 1251.353638] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.364158] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524c84b0-b766-7508-b3da-55c803262487, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.549883] env[69992]: DEBUG oslo_concurrency.lockutils [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.550238] env[69992]: DEBUG nova.compute.manager [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1251.553012] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.810s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.553261] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.555261] env[69992]: DEBUG oslo_concurrency.lockutils [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.287s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.555480] env[69992]: DEBUG nova.objects.instance [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lazy-loading 'resources' on Instance uuid 27492ef7-8258-4001-b3b3-5bcb94e12c1f {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1251.558674] env[69992]: DEBUG nova.compute.manager [req-864eb440-7d7c-43ac-8c7c-fb9d7049f8d0 req-596f6e36-3e0d-41b7-89ff-131cab2c2f91 service nova] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Received event network-vif-plugged-15455be6-d2df-46a9-bd15-7872eadb1ab6 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1251.562022] env[69992]: DEBUG oslo_concurrency.lockutils [req-864eb440-7d7c-43ac-8c7c-fb9d7049f8d0 req-596f6e36-3e0d-41b7-89ff-131cab2c2f91 service nova] Acquiring lock "31109fbd-ebc0-422d-a705-7d0e59d4bbb4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1251.562022] env[69992]: DEBUG oslo_concurrency.lockutils [req-864eb440-7d7c-43ac-8c7c-fb9d7049f8d0 req-596f6e36-3e0d-41b7-89ff-131cab2c2f91 service nova] Lock "31109fbd-ebc0-422d-a705-7d0e59d4bbb4-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.562022] env[69992]: DEBUG oslo_concurrency.lockutils [req-864eb440-7d7c-43ac-8c7c-fb9d7049f8d0 req-596f6e36-3e0d-41b7-89ff-131cab2c2f91 service nova] Lock "31109fbd-ebc0-422d-a705-7d0e59d4bbb4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.562022] env[69992]: DEBUG nova.compute.manager [req-864eb440-7d7c-43ac-8c7c-fb9d7049f8d0 req-596f6e36-3e0d-41b7-89ff-131cab2c2f91 service nova] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] No waiting events found dispatching network-vif-plugged-15455be6-d2df-46a9-bd15-7872eadb1ab6 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1251.562022] env[69992]: WARNING nova.compute.manager [req-864eb440-7d7c-43ac-8c7c-fb9d7049f8d0 req-596f6e36-3e0d-41b7-89ff-131cab2c2f91 service nova] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Received unexpected event network-vif-plugged-15455be6-d2df-46a9-bd15-7872eadb1ab6 for instance with vm_state building and task_state spawning. [ 1251.583348] env[69992]: INFO nova.scheduler.client.report [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Deleted allocations for instance 9df7b187-e579-41b0-9d24-be2a1ae93079 [ 1251.628793] env[69992]: DEBUG oslo_concurrency.lockutils [req-d0216dc4-5bdf-4bbe-aa11-104101e46c5f req-4956df91-2e4a-4cd1-aa20-d60810e80df2 service nova] Releasing lock "refresh_cache-57702674-4c96-4577-a93f-24ecffebb3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1251.633743] env[69992]: DEBUG nova.network.neutron [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Successfully updated port: 15455be6-d2df-46a9-bd15-7872eadb1ab6 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1251.694438] env[69992]: DEBUG nova.compute.manager [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1251.694656] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1251.695557] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5238cfdb-fd62-41c4-bd28-b3bd08aae1d9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.703935] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1251.704130] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7bb3b72b-6a99-405d-ba16-4552a8e98513 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.709439] env[69992]: DEBUG oslo_vmware.api [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1251.709439] env[69992]: value = "task-2897669" [ 1251.709439] env[69992]: _type = "Task" [ 1251.709439] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.716641] env[69992]: DEBUG oslo_vmware.api [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897669, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.832140] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Task: {'id': task-2897668, 'name': Rename_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.862862] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524c84b0-b766-7508-b3da-55c803262487, 'name': SearchDatastore_Task, 'duration_secs': 0.079299} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.863133] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1251.863399] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 57702674-4c96-4577-a93f-24ecffebb3a7/57702674-4c96-4577-a93f-24ecffebb3a7.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1251.863643] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb158611-96fb-4001-89a9-c56680d5f176 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.870596] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1251.870596] env[69992]: value = "task-2897670" [ 1251.870596] env[69992]: _type = "Task" [ 1251.870596] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.877784] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897670, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.059407] env[69992]: DEBUG nova.compute.utils [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1252.064385] env[69992]: DEBUG nova.compute.manager [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1252.064385] env[69992]: DEBUG nova.network.neutron [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1252.091349] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b34239e4-8999-45fd-b54c-61b6f50d8f79 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "9df7b187-e579-41b0-9d24-be2a1ae93079" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.866s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1252.109626] env[69992]: DEBUG nova.policy [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cdc7f71c9c4b4d40bf40b631c24b5ee6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '17ab89c6cf054418a4dd1a0e61b3a5e8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1252.140369] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "refresh_cache-31109fbd-ebc0-422d-a705-7d0e59d4bbb4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1252.140536] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "refresh_cache-31109fbd-ebc0-422d-a705-7d0e59d4bbb4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1252.140844] env[69992]: DEBUG nova.network.neutron [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1252.220976] env[69992]: DEBUG oslo_vmware.api [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897669, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.340344] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Task: {'id': task-2897668, 'name': Rename_Task, 'duration_secs': 0.833526} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.344606] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1252.345287] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a51e67ad-b651-421d-8cba-8c5f7b8f6686 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.356230] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Waiting for the task: (returnval){ [ 1252.356230] env[69992]: value = "task-2897671" [ 1252.356230] env[69992]: _type = "Task" [ 1252.356230] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.373607] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Task: {'id': task-2897671, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.389657] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897670, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.443196] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d284f4a1-8c53-427a-90c5-ec8d89554154 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.452185] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4848863-9edf-49f6-9085-86d52c8ff231 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.456202] env[69992]: DEBUG nova.network.neutron [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Successfully created port: 0cb0f0fd-a197-4688-a99f-231754ad8820 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1252.486307] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8475bcbc-aa12-4b24-b265-570b791010f9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.493981] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-042cfc7a-70e7-483e-b195-5fe9b6408753 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.510019] env[69992]: DEBUG nova.compute.provider_tree [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1252.566945] env[69992]: DEBUG nova.compute.manager [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1252.679226] env[69992]: DEBUG nova.network.neutron [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1252.719036] env[69992]: DEBUG oslo_vmware.api [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897669, 'name': PowerOffVM_Task, 'duration_secs': 0.623433} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.719314] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1252.719487] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1252.719737] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-34442197-dfd2-4d2d-a8c2-71d464757963 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.798025] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1252.798025] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1252.798025] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Deleting the datastore file [datastore2] 1b4da2ab-d026-45d8-8234-79ddd84d5cbb {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1252.798271] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f279881-05d1-4890-bd9e-76c8be488e88 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.804781] env[69992]: DEBUG oslo_vmware.api [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1252.804781] env[69992]: value = "task-2897673" [ 1252.804781] env[69992]: _type = "Task" [ 1252.804781] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.814430] env[69992]: DEBUG oslo_vmware.api [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897673, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.828086] env[69992]: DEBUG nova.network.neutron [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Updating instance_info_cache with network_info: [{"id": "15455be6-d2df-46a9-bd15-7872eadb1ab6", "address": "fa:16:3e:7b:a7:d0", "network": {"id": "0655b1a1-e1ee-4efd-889c-0f72915310f5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1445509008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51b8195c4e7418cbdaa66aa5e5aff5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15455be6-d2", "ovs_interfaceid": "15455be6-d2df-46a9-bd15-7872eadb1ab6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1252.866293] env[69992]: DEBUG oslo_vmware.api [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Task: {'id': task-2897671, 'name': PowerOnVM_Task, 'duration_secs': 0.489658} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.866542] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1252.866740] env[69992]: INFO nova.compute.manager [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Took 8.70 seconds to spawn the instance on the hypervisor. 
[ 1252.866916] env[69992]: DEBUG nova.compute.manager [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1252.867689] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f09011-de90-4141-8719-32adb4b5ee9a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.884535] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897670, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.598279} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.884766] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 57702674-4c96-4577-a93f-24ecffebb3a7/57702674-4c96-4577-a93f-24ecffebb3a7.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1252.884972] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1252.885221] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b76f73a3-1a9d-4153-95ca-d25286071525 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.892622] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1252.892622] env[69992]: value = "task-2897674" [ 1252.892622] env[69992]: _type = "Task" [ 1252.892622] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.900442] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897674, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.013647] env[69992]: DEBUG nova.scheduler.client.report [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1253.317442] env[69992]: DEBUG oslo_vmware.api [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897673, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.332284] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "refresh_cache-31109fbd-ebc0-422d-a705-7d0e59d4bbb4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1253.332704] env[69992]: DEBUG nova.compute.manager [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Instance network_info: |[{"id": "15455be6-d2df-46a9-bd15-7872eadb1ab6", "address": "fa:16:3e:7b:a7:d0", "network": {"id": "0655b1a1-e1ee-4efd-889c-0f72915310f5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1445509008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51b8195c4e7418cbdaa66aa5e5aff5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15455be6-d2", "ovs_interfaceid": "15455be6-d2df-46a9-bd15-7872eadb1ab6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1253.333238] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:a7:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '15455be6-d2df-46a9-bd15-7872eadb1ab6', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1253.342512] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1253.342786] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1253.342912] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f51b0690-9ffd-403f-9ba5-68c8f2a9ee12 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.363380] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1253.363380] env[69992]: value = "task-2897675" [ 1253.363380] env[69992]: _type = "Task" [ 1253.363380] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.370588] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897675, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.387031] env[69992]: INFO nova.compute.manager [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Took 44.32 seconds to build instance. [ 1253.401896] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897674, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072413} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.402545] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1253.403920] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f314ac26-f294-450f-8c25-f58e0021e402 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.427125] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 57702674-4c96-4577-a93f-24ecffebb3a7/57702674-4c96-4577-a93f-24ecffebb3a7.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1253.427741] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-060a592f-3d02-4a81-8895-6aa1014cd5dc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.449603] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1253.449603] env[69992]: value = "task-2897676" [ 1253.449603] env[69992]: _type = "Task" [ 1253.449603] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.461257] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897676, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.521107] env[69992]: DEBUG oslo_concurrency.lockutils [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.966s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1253.524573] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 31.642s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1253.524674] env[69992]: DEBUG nova.objects.instance [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69992) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1253.548424] env[69992]: INFO nova.scheduler.client.report [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Deleted allocations for instance 27492ef7-8258-4001-b3b3-5bcb94e12c1f [ 1253.573802] env[69992]: DEBUG nova.compute.manager [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1253.607468] env[69992]: DEBUG nova.virt.hardware [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1253.607715] env[69992]: DEBUG nova.virt.hardware [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1253.607870] env[69992]: DEBUG nova.virt.hardware [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1253.608061] env[69992]: DEBUG nova.virt.hardware [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1253.608751] env[69992]: DEBUG nova.virt.hardware [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1253.608751] env[69992]: DEBUG nova.virt.hardware [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1253.608751] env[69992]: DEBUG nova.virt.hardware [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1253.608751] env[69992]: DEBUG nova.virt.hardware [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1253.609109] env[69992]: DEBUG 
nova.virt.hardware [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1253.609109] env[69992]: DEBUG nova.virt.hardware [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1253.609263] env[69992]: DEBUG nova.virt.hardware [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1253.610331] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6fd6a8-a815-443c-84e0-a860404c1d94 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.620175] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6577a2ab-07ab-4b35-bc5e-230b0b33c7b4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.640908] env[69992]: DEBUG nova.compute.manager [req-5bbf6b14-9f43-47a4-911f-872f1df25dcd req-967889e4-753e-4b4c-8602-be192a120ab6 service nova] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Received event network-changed-15455be6-d2df-46a9-bd15-7872eadb1ab6 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1253.641022] env[69992]: DEBUG nova.compute.manager [req-5bbf6b14-9f43-47a4-911f-872f1df25dcd req-967889e4-753e-4b4c-8602-be192a120ab6 service nova] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Refreshing instance network info cache due to event network-changed-15455be6-d2df-46a9-bd15-7872eadb1ab6. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1253.641270] env[69992]: DEBUG oslo_concurrency.lockutils [req-5bbf6b14-9f43-47a4-911f-872f1df25dcd req-967889e4-753e-4b4c-8602-be192a120ab6 service nova] Acquiring lock "refresh_cache-31109fbd-ebc0-422d-a705-7d0e59d4bbb4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.641417] env[69992]: DEBUG oslo_concurrency.lockutils [req-5bbf6b14-9f43-47a4-911f-872f1df25dcd req-967889e4-753e-4b4c-8602-be192a120ab6 service nova] Acquired lock "refresh_cache-31109fbd-ebc0-422d-a705-7d0e59d4bbb4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1253.641581] env[69992]: DEBUG nova.network.neutron [req-5bbf6b14-9f43-47a4-911f-872f1df25dcd req-967889e4-753e-4b4c-8602-be192a120ab6 service nova] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Refreshing network info cache for port 15455be6-d2df-46a9-bd15-7872eadb1ab6 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1253.815740] env[69992]: DEBUG oslo_vmware.api [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897673, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.918337} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.816142] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1253.816304] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1253.816490] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1253.816666] env[69992]: INFO nova.compute.manager [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Took 2.12 seconds to destroy the instance on the hypervisor. [ 1253.816907] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1253.817144] env[69992]: DEBUG nova.compute.manager [-] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1253.817235] env[69992]: DEBUG nova.network.neutron [-] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1253.873473] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897675, 'name': CreateVM_Task, 'duration_secs': 0.391039} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.873691] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1253.874379] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.874552] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1253.874875] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1253.875166] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa78b9ac-1ac5-4f83-873e-3fa0cc0914b0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.879696] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1253.879696] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52892452-e021-118a-e1d7-c9fe08003d68" [ 1253.879696] env[69992]: _type = "Task" [ 1253.879696] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.891770] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6d2f12b4-e7f6-4949-a89d-14f8ae28ed7e tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Lock "7fa33d98-20b7-4162-a354-24cfea17701f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.836s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1253.902191] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52892452-e021-118a-e1d7-c9fe08003d68, 'name': SearchDatastore_Task, 'duration_secs': 0.01238} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.904971] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1253.905225] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1253.905464] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.905612] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1253.905790] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1253.906633] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1d318f2b-2857-40e6-a1c0-ac9af2589132 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.915954] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 
tempest-ServerDiskConfigTestJSON-649865869-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1253.916153] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1253.917170] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bd6aebf-f28f-468a-bc09-952742dda3a5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.922372] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1253.922372] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52fae3c1-f427-2467-4a66-718b6aa81ed0" [ 1253.922372] env[69992]: _type = "Task" [ 1253.922372] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.931223] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52fae3c1-f427-2467-4a66-718b6aa81ed0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.959998] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897676, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.058489] env[69992]: DEBUG oslo_concurrency.lockutils [None req-384b7750-7996-4673-9e93-401848e1abd0 tempest-ServersAdminTestJSON-40334490 tempest-ServersAdminTestJSON-40334490-project-member] Lock "27492ef7-8258-4001-b3b3-5bcb94e12c1f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.306s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1254.136994] env[69992]: DEBUG nova.network.neutron [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Successfully updated port: 0cb0f0fd-a197-4688-a99f-231754ad8820 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1254.395483] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Acquiring lock "7fa33d98-20b7-4162-a354-24cfea17701f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1254.395483] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Lock "7fa33d98-20b7-4162-a354-24cfea17701f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1254.395483] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Acquiring lock "7fa33d98-20b7-4162-a354-24cfea17701f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1254.395483] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Lock "7fa33d98-20b7-4162-a354-24cfea17701f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1254.395483] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Lock "7fa33d98-20b7-4162-a354-24cfea17701f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1254.396924] env[69992]: INFO nova.compute.manager [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 
tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Terminating instance [ 1254.436357] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52fae3c1-f427-2467-4a66-718b6aa81ed0, 'name': SearchDatastore_Task, 'duration_secs': 0.013444} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.437764] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7321268c-df44-406e-a55d-d99e50f48f39 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.444537] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1254.444537] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5239265b-e005-ab56-a7c5-bd5e023a63ff" [ 1254.444537] env[69992]: _type = "Task" [ 1254.444537] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.466432] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5239265b-e005-ab56-a7c5-bd5e023a63ff, 'name': SearchDatastore_Task, 'duration_secs': 0.009754} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.471766] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1254.472330] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 31109fbd-ebc0-422d-a705-7d0e59d4bbb4/31109fbd-ebc0-422d-a705-7d0e59d4bbb4.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1254.472800] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897676, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.473167] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-650960e4-97c2-4118-9a2f-e0f852844630 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.481465] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1254.481465] env[69992]: value = "task-2897677" [ 1254.481465] env[69992]: _type = "Task" [ 1254.481465] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.494087] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897677, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.496013] env[69992]: DEBUG nova.network.neutron [req-5bbf6b14-9f43-47a4-911f-872f1df25dcd req-967889e4-753e-4b4c-8602-be192a120ab6 service nova] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Updated VIF entry in instance network info cache for port 15455be6-d2df-46a9-bd15-7872eadb1ab6. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1254.497157] env[69992]: DEBUG nova.network.neutron [req-5bbf6b14-9f43-47a4-911f-872f1df25dcd req-967889e4-753e-4b4c-8602-be192a120ab6 service nova] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Updating instance_info_cache with network_info: [{"id": "15455be6-d2df-46a9-bd15-7872eadb1ab6", "address": "fa:16:3e:7b:a7:d0", "network": {"id": "0655b1a1-e1ee-4efd-889c-0f72915310f5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1445509008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51b8195c4e7418cbdaa66aa5e5aff5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15455be6-d2", "ovs_interfaceid": "15455be6-d2df-46a9-bd15-7872eadb1ab6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1254.536525] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f91a1cc2-044d-47fa-a22b-aa99bc87d4f8 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1254.539741] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.020s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1254.541274] env[69992]: INFO nova.compute.claims [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1254.641531] env[69992]: DEBUG oslo_concurrency.lockutils [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "refresh_cache-dedba037-48a7-4083-925d-5f34e2a27362" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.641531] env[69992]: DEBUG oslo_concurrency.lockutils [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired lock "refresh_cache-dedba037-48a7-4083-925d-5f34e2a27362" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1254.641531] env[69992]: DEBUG nova.network.neutron [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1254.902120] env[69992]: DEBUG nova.compute.manager [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1254.902479] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1254.903679] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f42af71-6536-4d81-a463-1f70fd594040 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.913511] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1254.913858] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1afe2af-6748-4f59-8ede-2370ef32fc7f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.921370] env[69992]: DEBUG oslo_vmware.api [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Waiting for the task: (returnval){ [ 1254.921370] env[69992]: value = "task-2897678" [ 1254.921370] env[69992]: _type = "Task" [ 1254.921370] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.936127] env[69992]: DEBUG oslo_vmware.api [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Task: {'id': task-2897678, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.964562] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897676, 'name': ReconfigVM_Task, 'duration_secs': 1.369689} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.964893] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 57702674-4c96-4577-a93f-24ecffebb3a7/57702674-4c96-4577-a93f-24ecffebb3a7.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1254.965602] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-486bd9da-8dbb-43bd-8aab-f84e4207018a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.973751] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1254.973751] env[69992]: value = "task-2897679" [ 1254.973751] env[69992]: _type = "Task" [ 1254.973751] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.983603] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897679, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.993377] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897677, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.000295] env[69992]: DEBUG oslo_concurrency.lockutils [req-5bbf6b14-9f43-47a4-911f-872f1df25dcd req-967889e4-753e-4b4c-8602-be192a120ab6 service nova] Releasing lock "refresh_cache-31109fbd-ebc0-422d-a705-7d0e59d4bbb4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1255.053086] env[69992]: DEBUG nova.network.neutron [-] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.204709] env[69992]: DEBUG nova.network.neutron [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1255.433204] env[69992]: DEBUG oslo_vmware.api [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Task: {'id': task-2897678, 'name': PowerOffVM_Task, 'duration_secs': 0.361657} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.433595] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1255.433672] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1255.433912] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-302f7354-1261-461d-ba7f-52b6b34a3392 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.441690] env[69992]: DEBUG nova.network.neutron [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Updating instance_info_cache with network_info: [{"id": "0cb0f0fd-a197-4688-a99f-231754ad8820", "address": "fa:16:3e:e1:7a:95", "network": {"id": "107a28a2-7647-4953-abac-1246b892511f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1382825212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ab89c6cf054418a4dd1a0e61b3a5e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0cb0f0fd-a1", "ovs_interfaceid": "0cb0f0fd-a197-4688-a99f-231754ad8820", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.492028] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897679, 'name': Rename_Task, 'duration_secs': 0.362134} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.492211] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1255.492514] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7a77434-6c4a-4f36-824c-760424de7b04 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.500888] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897677, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.768234} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.501868] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 31109fbd-ebc0-422d-a705-7d0e59d4bbb4/31109fbd-ebc0-422d-a705-7d0e59d4bbb4.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1255.502140] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1255.502602] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9ee79165-09af-4759-9389-12585b9bf460 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.506659] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1255.506659] env[69992]: value = "task-2897681" [ 1255.506659] env[69992]: _type = "Task" [ 1255.506659] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.508992] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1255.509210] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1255.509454] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Deleting the datastore file [datastore2] 7fa33d98-20b7-4162-a354-24cfea17701f {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1255.512748] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39ca60fa-4ec9-4aa3-8065-b2d0faa38d23 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.516870] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1255.516870] env[69992]: value = "task-2897682" [ 1255.516870] env[69992]: _type = "Task" [ 1255.516870] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.520198] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897681, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.525370] env[69992]: DEBUG oslo_vmware.api [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Waiting for the task: (returnval){ [ 1255.525370] env[69992]: value = "task-2897683" [ 1255.525370] env[69992]: _type = "Task" [ 1255.525370] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.534210] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897682, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.539704] env[69992]: DEBUG oslo_vmware.api [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Task: {'id': task-2897683, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.558644] env[69992]: INFO nova.compute.manager [-] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Took 1.74 seconds to deallocate network for instance. [ 1255.721336] env[69992]: DEBUG nova.compute.manager [req-722d53bf-2776-4712-8de4-6daf19fc1f90 req-2bcf3b19-5653-4406-a8d3-555ab09621b7 service nova] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Received event network-vif-plugged-0cb0f0fd-a197-4688-a99f-231754ad8820 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1255.721336] env[69992]: DEBUG oslo_concurrency.lockutils [req-722d53bf-2776-4712-8de4-6daf19fc1f90 req-2bcf3b19-5653-4406-a8d3-555ab09621b7 service nova] Acquiring lock "dedba037-48a7-4083-925d-5f34e2a27362-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1255.721336] env[69992]: DEBUG oslo_concurrency.lockutils [req-722d53bf-2776-4712-8de4-6daf19fc1f90 req-2bcf3b19-5653-4406-a8d3-555ab09621b7 service nova] Lock "dedba037-48a7-4083-925d-5f34e2a27362-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1255.721837] env[69992]: DEBUG oslo_concurrency.lockutils [req-722d53bf-2776-4712-8de4-6daf19fc1f90 req-2bcf3b19-5653-4406-a8d3-555ab09621b7 service nova] Lock "dedba037-48a7-4083-925d-5f34e2a27362-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1255.722292] env[69992]: DEBUG nova.compute.manager [req-722d53bf-2776-4712-8de4-6daf19fc1f90 req-2bcf3b19-5653-4406-a8d3-555ab09621b7 service nova] [instance: dedba037-48a7-4083-925d-5f34e2a27362] No waiting events found dispatching network-vif-plugged-0cb0f0fd-a197-4688-a99f-231754ad8820 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1255.722636] env[69992]: WARNING nova.compute.manager [req-722d53bf-2776-4712-8de4-6daf19fc1f90 req-2bcf3b19-5653-4406-a8d3-555ab09621b7 service nova] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Received unexpected event network-vif-plugged-0cb0f0fd-a197-4688-a99f-231754ad8820 for instance with vm_state building and task_state spawning. [ 1255.722949] env[69992]: DEBUG nova.compute.manager [req-722d53bf-2776-4712-8de4-6daf19fc1f90 req-2bcf3b19-5653-4406-a8d3-555ab09621b7 service nova] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Received event network-changed-0cb0f0fd-a197-4688-a99f-231754ad8820 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1255.723811] env[69992]: DEBUG nova.compute.manager [req-722d53bf-2776-4712-8de4-6daf19fc1f90 req-2bcf3b19-5653-4406-a8d3-555ab09621b7 service nova] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Refreshing instance network info cache due to event network-changed-0cb0f0fd-a197-4688-a99f-231754ad8820. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1255.723811] env[69992]: DEBUG oslo_concurrency.lockutils [req-722d53bf-2776-4712-8de4-6daf19fc1f90 req-2bcf3b19-5653-4406-a8d3-555ab09621b7 service nova] Acquiring lock "refresh_cache-dedba037-48a7-4083-925d-5f34e2a27362" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.921882] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0493b96-8663-4ad4-a6c5-7dc55b2baec6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.930914] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-774e0b2a-6aa4-4230-b932-36de8ab13c1e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.964349] env[69992]: DEBUG oslo_concurrency.lockutils [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Releasing lock "refresh_cache-dedba037-48a7-4083-925d-5f34e2a27362" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1255.965029] env[69992]: DEBUG nova.compute.manager [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Instance network_info: |[{"id": "0cb0f0fd-a197-4688-a99f-231754ad8820", "address": "fa:16:3e:e1:7a:95", "network": {"id": "107a28a2-7647-4953-abac-1246b892511f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1382825212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ab89c6cf054418a4dd1a0e61b3a5e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0cb0f0fd-a1", "ovs_interfaceid": "0cb0f0fd-a197-4688-a99f-231754ad8820", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1255.965701] env[69992]: DEBUG oslo_concurrency.lockutils [req-722d53bf-2776-4712-8de4-6daf19fc1f90 req-2bcf3b19-5653-4406-a8d3-555ab09621b7 service nova] Acquired lock "refresh_cache-dedba037-48a7-4083-925d-5f34e2a27362" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1255.965798] env[69992]: DEBUG nova.network.neutron [req-722d53bf-2776-4712-8de4-6daf19fc1f90 req-2bcf3b19-5653-4406-a8d3-555ab09621b7 service nova] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Refreshing network info cache for port 0cb0f0fd-a197-4688-a99f-231754ad8820 {{(pid=69992) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2064}} [ 1255.967096] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:7a:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46785c9c-8b22-487d-a854-b3e67c5ed1d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0cb0f0fd-a197-4688-a99f-231754ad8820', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1255.975470] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1255.976196] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f40af389-e697-4e25-8a54-753a2ee5c34a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.979717] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1255.979957] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c4ded7a-32c0-4a26-a702-144a60c8d44f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.003387] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a17a5c63-bb25-4adf-8d3a-07e458a1c0b3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.007428] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1256.007428] env[69992]: value = "task-2897684" [ 1256.007428] env[69992]: _type = "Task" [ 1256.007428] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.020728] env[69992]: DEBUG nova.compute.provider_tree [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1256.031915] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897684, 'name': CreateVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.038817] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897681, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.042423] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897682, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.15776} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.042697] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1256.043472] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47379ec7-d2f8-4ca5-aa1a-60feb70d7202 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.048769] env[69992]: DEBUG oslo_vmware.api [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Task: {'id': task-2897683, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.069445] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 31109fbd-ebc0-422d-a705-7d0e59d4bbb4/31109fbd-ebc0-422d-a705-7d0e59d4bbb4.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1256.069733] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be9af7ad-4ccb-44b8-8d9e-6a9d558f3afa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.085209] env[69992]: DEBUG oslo_concurrency.lockutils [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1256.090585] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1256.090585] env[69992]: value = "task-2897685" [ 1256.090585] env[69992]: _type = "Task" [ 1256.090585] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.098707] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897685, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.519774] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897684, 'name': CreateVM_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.527750] env[69992]: DEBUG nova.scheduler.client.report [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1256.531125] env[69992]: DEBUG oslo_vmware.api [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897681, 'name': PowerOnVM_Task, 'duration_secs': 0.901373} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.535102] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1256.535102] env[69992]: INFO nova.compute.manager [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Took 9.86 seconds to spawn the instance on the hypervisor. [ 1256.535102] env[69992]: DEBUG nova.compute.manager [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1256.535102] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4ddf71-97bc-45c8-9ca6-1726835135a3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.546575] env[69992]: DEBUG oslo_vmware.api [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Task: {'id': task-2897683, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.576793} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.547106] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1256.547306] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1256.547534] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1256.547644] env[69992]: INFO nova.compute.manager [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1256.547885] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1256.548089] env[69992]: DEBUG nova.compute.manager [-] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1256.548223] env[69992]: DEBUG nova.network.neutron [-] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1256.603511] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897685, 'name': ReconfigVM_Task, 'duration_secs': 0.351792} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.603771] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 31109fbd-ebc0-422d-a705-7d0e59d4bbb4/31109fbd-ebc0-422d-a705-7d0e59d4bbb4.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1256.604751] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-14f5aec2-78b0-461f-8162-4cb86d8ae24e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.610553] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1256.610553] env[69992]: value = "task-2897686" [ 1256.610553] env[69992]: _type = "Task" [ 1256.610553] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.618761] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897686, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.806889] env[69992]: DEBUG nova.network.neutron [req-722d53bf-2776-4712-8de4-6daf19fc1f90 req-2bcf3b19-5653-4406-a8d3-555ab09621b7 service nova] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Updated VIF entry in instance network info cache for port 0cb0f0fd-a197-4688-a99f-231754ad8820. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1256.807317] env[69992]: DEBUG nova.network.neutron [req-722d53bf-2776-4712-8de4-6daf19fc1f90 req-2bcf3b19-5653-4406-a8d3-555ab09621b7 service nova] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Updating instance_info_cache with network_info: [{"id": "0cb0f0fd-a197-4688-a99f-231754ad8820", "address": "fa:16:3e:e1:7a:95", "network": {"id": "107a28a2-7647-4953-abac-1246b892511f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1382825212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ab89c6cf054418a4dd1a0e61b3a5e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0cb0f0fd-a1", "ovs_interfaceid": "0cb0f0fd-a197-4688-a99f-231754ad8820", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.022405] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897684, 'name': CreateVM_Task, 'duration_secs': 0.924915} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.022536] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1257.026526] env[69992]: DEBUG oslo_concurrency.lockutils [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1257.026703] env[69992]: DEBUG oslo_concurrency.lockutils [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1257.027033] env[69992]: DEBUG oslo_concurrency.lockutils [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1257.027293] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f7c53d0-562d-46a7-a3b1-ed5c32486760 {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.033072] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.495s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1257.033620] env[69992]: DEBUG nova.compute.manager [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1257.039460] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.615s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1257.039748] env[69992]: DEBUG nova.objects.instance [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Lazy-loading 'resources' on Instance uuid fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1257.041058] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1257.041058] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5222f01c-62c8-95a2-d546-492869131e70" [ 1257.041058] env[69992]: _type = "Task" [ 1257.041058] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.053949] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5222f01c-62c8-95a2-d546-492869131e70, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.060986] env[69992]: INFO nova.compute.manager [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Took 45.39 seconds to build instance. [ 1257.121155] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897686, 'name': Rename_Task, 'duration_secs': 0.199928} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.121459] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1257.121711] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d06f8332-d059-464d-ab46-51b1441ffbbf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.127687] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1257.127687] env[69992]: value = "task-2897687" [ 1257.127687] env[69992]: _type = "Task" [ 1257.127687] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.135255] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897687, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.310277] env[69992]: DEBUG oslo_concurrency.lockutils [req-722d53bf-2776-4712-8de4-6daf19fc1f90 req-2bcf3b19-5653-4406-a8d3-555ab09621b7 service nova] Releasing lock "refresh_cache-dedba037-48a7-4083-925d-5f34e2a27362" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1257.310611] env[69992]: DEBUG nova.compute.manager [req-722d53bf-2776-4712-8de4-6daf19fc1f90 req-2bcf3b19-5653-4406-a8d3-555ab09621b7 service nova] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Received event network-vif-deleted-789f6123-167b-48dd-ae68-cfdbc1d5c78a {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1257.376916] env[69992]: DEBUG nova.network.neutron [-] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.547642] env[69992]: DEBUG nova.compute.utils [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1257.550590] env[69992]: DEBUG nova.compute.manager [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1257.550590] env[69992]: DEBUG nova.network.neutron [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1257.565447] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe7e124b-d9ba-4308-bda3-41ff5c653618 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "57702674-4c96-4577-a93f-24ecffebb3a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.911s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1257.565447] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5222f01c-62c8-95a2-d546-492869131e70, 'name': SearchDatastore_Task, 'duration_secs': 0.020371} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.565447] env[69992]: DEBUG oslo_concurrency.lockutils [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1257.565447] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1257.565447] env[69992]: DEBUG oslo_concurrency.lockutils [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1257.565447] env[69992]: DEBUG oslo_concurrency.lockutils [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1257.565861] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1257.566073] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-dced6185-a31e-4f6a-80fd-507dd886ec91 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.575813] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1257.576011] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1257.576725] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-414d33bb-b4d2-475c-8b7a-66a4d735fd89 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.582384] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1257.582384] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52212186-52e7-c11f-b128-7343ef973d12" [ 1257.582384] env[69992]: _type = "Task" [ 1257.582384] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.593474] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52212186-52e7-c11f-b128-7343ef973d12, 'name': SearchDatastore_Task, 'duration_secs': 0.008477} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.596350] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20292be3-9228-4d33-904e-27ebaf12d026 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.601478] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1257.601478] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a41c79-be58-fa51-477d-b1571d084367" [ 1257.601478] env[69992]: _type = "Task" [ 1257.601478] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.606693] env[69992]: DEBUG nova.policy [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9e7acd70754b4b5d966bcc0662b9a2e8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca458056b0794b08b812f0a4106a448c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1257.612585] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a41c79-be58-fa51-477d-b1571d084367, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.643464] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897687, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.844287] env[69992]: DEBUG nova.compute.manager [req-566612a8-4a73-47ad-9d04-39ca97d98125 req-325c0bb1-2592-48d9-a60c-1786b708f6d7 service nova] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Received event network-vif-deleted-e7144a56-29c2-4983-b57d-7e1b28077d55 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1257.882056] env[69992]: INFO nova.compute.manager [-] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Took 1.33 seconds to deallocate network for instance. 
[ 1257.936344] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f631b54-b12a-4574-b289-622fdfe1ee4b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.947714] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aaf0381-5ad1-4b4b-8985-c25e65ee9fde {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.982227] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf319f16-12d7-4366-9ba6-297bf4ddfc50 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.990559] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78cb4a1b-ebcb-4aea-ad27-dada416fb3ce {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.005886] env[69992]: DEBUG nova.compute.provider_tree [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1258.056846] env[69992]: DEBUG nova.compute.manager [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1258.114676] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a41c79-be58-fa51-477d-b1571d084367, 'name': SearchDatastore_Task, 'duration_secs': 0.019691} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.115124] env[69992]: DEBUG oslo_concurrency.lockutils [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1258.115288] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] dedba037-48a7-4083-925d-5f34e2a27362/dedba037-48a7-4083-925d-5f34e2a27362.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1258.115583] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-276ad3b1-f591-4ed9-8b15-40b896830af3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.123193] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1258.123193] env[69992]: value = "task-2897688" [ 1258.123193] env[69992]: _type = "Task" [ 1258.123193] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.132080] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897688, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.132429] env[69992]: DEBUG nova.network.neutron [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Successfully created port: 2c7ae122-41e5-4605-a33e-4516dd1f5945 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1258.142576] env[69992]: DEBUG oslo_vmware.api [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897687, 'name': PowerOnVM_Task, 'duration_secs': 0.648002} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.142833] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1258.143390] env[69992]: INFO nova.compute.manager [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Took 7.04 seconds to spawn the instance on the hypervisor. [ 1258.143390] env[69992]: DEBUG nova.compute.manager [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1258.144192] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16369d4a-bf0a-4a19-aba0-f3c43ed95d6e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.388375] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1258.512340] env[69992]: DEBUG nova.scheduler.client.report [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1258.633798] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897688, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.667372] env[69992]: INFO nova.compute.manager [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Took 43.62 seconds to build instance. 
[ 1259.017324] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.978s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1259.019937] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 35.341s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1259.021110] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1259.021110] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69992) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1259.021110] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 32.002s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1259.021110] env[69992]: DEBUG nova.objects.instance [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69992) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1259.024256] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1087cf1e-5ab7-46ef-a6ee-f541ff1aa305 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.034725] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c579b79f-6ca7-4234-887e-cf64331145f6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.044037] env[69992]: INFO nova.scheduler.client.report [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Deleted allocations for instance fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2 [ 1259.057836] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c4c545-7f7b-4a78-87d7-0696e29909c1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.067667] env[69992]: DEBUG nova.compute.manager [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b 
tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1259.071978] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca4a860e-b00d-42ea-add6-5ffc27d7c311 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.104861] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=177861MB free_disk=161GB free_vcpus=48 pci_devices=None {{(pid=69992) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1259.105316] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1259.113991] env[69992]: DEBUG nova.virt.hardware [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1259.114279] env[69992]: DEBUG nova.virt.hardware [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1259.114446] env[69992]: DEBUG nova.virt.hardware [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1259.114672] env[69992]: DEBUG nova.virt.hardware [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1259.115179] env[69992]: DEBUG nova.virt.hardware [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1259.115179] env[69992]: DEBUG nova.virt.hardware [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1259.115320] env[69992]: DEBUG nova.virt.hardware [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1259.115377] env[69992]: DEBUG nova.virt.hardware [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1259.115545] env[69992]: DEBUG nova.virt.hardware [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1259.115710] env[69992]: DEBUG nova.virt.hardware [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1259.115944] env[69992]: DEBUG nova.virt.hardware [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1259.117075] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c42b2d7-29d9-4f19-884c-d45683712a09 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.129132] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eabb7b1-bdf8-49b0-b9ff-0957b7adc330 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.147867] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897688, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.660693} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.148789] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] dedba037-48a7-4083-925d-5f34e2a27362/dedba037-48a7-4083-925d-5f34e2a27362.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1259.149117] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1259.149357] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e8a51150-103c-4ab6-ae32-8bd7db64bc4b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.156186] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1259.156186] env[69992]: value = "task-2897689" [ 1259.156186] env[69992]: _type = "Task" [ 1259.156186] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.165862] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897689, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.170065] env[69992]: DEBUG oslo_concurrency.lockutils [None req-eb72a942-b30c-4770-8c95-faf19cccf203 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "31109fbd-ebc0-422d-a705-7d0e59d4bbb4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.136s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1259.565732] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d36d1d45-e40a-4913-a30b-8518fae50ca5 tempest-ServerActionsV293TestJSON-1685366128 tempest-ServerActionsV293TestJSON-1685366128-project-member] Lock "fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.659s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1259.666507] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897689, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07385} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.667215] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1259.668153] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-887c1b2e-851f-4c8b-b7dc-1006a5d08cd6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.695102] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] dedba037-48a7-4083-925d-5f34e2a27362/dedba037-48a7-4083-925d-5f34e2a27362.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1259.695690] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7169cbab-5342-49c6-bab2-20df5987c07e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.722224] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1259.722224] env[69992]: value = "task-2897690" [ 1259.722224] env[69992]: _type = "Task" [ 1259.722224] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.731777] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897690, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.928376] env[69992]: DEBUG nova.compute.manager [req-a6252c81-75a8-4551-a453-a7ad6b99ef93 req-6eee07b7-e5ca-44bd-bec5-b545c954057f service nova] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Received event network-changed-942293fd-c866-4331-b9d4-f667536a039b {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1259.928589] env[69992]: DEBUG nova.compute.manager [req-a6252c81-75a8-4551-a453-a7ad6b99ef93 req-6eee07b7-e5ca-44bd-bec5-b545c954057f service nova] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Refreshing instance network info cache due to event network-changed-942293fd-c866-4331-b9d4-f667536a039b. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1259.929415] env[69992]: DEBUG oslo_concurrency.lockutils [req-a6252c81-75a8-4551-a453-a7ad6b99ef93 req-6eee07b7-e5ca-44bd-bec5-b545c954057f service nova] Acquiring lock "refresh_cache-57702674-4c96-4577-a93f-24ecffebb3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1259.929415] env[69992]: DEBUG oslo_concurrency.lockutils [req-a6252c81-75a8-4551-a453-a7ad6b99ef93 req-6eee07b7-e5ca-44bd-bec5-b545c954057f service nova] Acquired lock "refresh_cache-57702674-4c96-4577-a93f-24ecffebb3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1259.929415] env[69992]: DEBUG nova.network.neutron [req-a6252c81-75a8-4551-a453-a7ad6b99ef93 req-6eee07b7-e5ca-44bd-bec5-b545c954057f service nova] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Refreshing network info cache for port 942293fd-c866-4331-b9d4-f667536a039b {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1260.033569] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6c525815-1b8a-45c6-8ecd-0d8bae7b1912 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.034743] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.716s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.034970] env[69992]: DEBUG nova.objects.instance [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Lazy-loading 'resources' on Instance uuid fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1260.142437] env[69992]: DEBUG nova.compute.manager [req-d2b6bbc9-ad87-47cc-9fe1-13a2deb8a642 req-905a0f4d-c6cb-4976-97f9-888b17ab387d service nova] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Received event network-vif-plugged-2c7ae122-41e5-4605-a33e-4516dd1f5945 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1260.142626] env[69992]: DEBUG oslo_concurrency.lockutils [req-d2b6bbc9-ad87-47cc-9fe1-13a2deb8a642 req-905a0f4d-c6cb-4976-97f9-888b17ab387d service nova] Acquiring lock "1f9f3bdf-c806-4ac9-85f3-6b33b983fafe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1260.142823] env[69992]: DEBUG oslo_concurrency.lockutils [req-d2b6bbc9-ad87-47cc-9fe1-13a2deb8a642 req-905a0f4d-c6cb-4976-97f9-888b17ab387d service nova] Lock "1f9f3bdf-c806-4ac9-85f3-6b33b983fafe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.143187] env[69992]: DEBUG 
oslo_concurrency.lockutils [req-d2b6bbc9-ad87-47cc-9fe1-13a2deb8a642 req-905a0f4d-c6cb-4976-97f9-888b17ab387d service nova] Lock "1f9f3bdf-c806-4ac9-85f3-6b33b983fafe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.143187] env[69992]: DEBUG nova.compute.manager [req-d2b6bbc9-ad87-47cc-9fe1-13a2deb8a642 req-905a0f4d-c6cb-4976-97f9-888b17ab387d service nova] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] No waiting events found dispatching network-vif-plugged-2c7ae122-41e5-4605-a33e-4516dd1f5945 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1260.143452] env[69992]: WARNING nova.compute.manager [req-d2b6bbc9-ad87-47cc-9fe1-13a2deb8a642 req-905a0f4d-c6cb-4976-97f9-888b17ab387d service nova] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Received unexpected event network-vif-plugged-2c7ae122-41e5-4605-a33e-4516dd1f5945 for instance with vm_state building and task_state spawning. [ 1260.231053] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897690, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.734125] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897690, 'name': ReconfigVM_Task, 'duration_secs': 0.811637} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.735268] env[69992]: DEBUG nova.network.neutron [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Successfully updated port: 2c7ae122-41e5-4605-a33e-4516dd1f5945 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1260.737081] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Reconfigured VM instance instance-00000054 to attach disk [datastore1] dedba037-48a7-4083-925d-5f34e2a27362/dedba037-48a7-4083-925d-5f34e2a27362.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1260.738195] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c70f6d65-326b-4a3e-a4e8-f76583b03d64 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.746420] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1260.746420] env[69992]: value = "task-2897691" [ 1260.746420] env[69992]: _type = "Task" [ 1260.746420] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.761354] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897691, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.825506] env[69992]: DEBUG nova.network.neutron [req-a6252c81-75a8-4551-a453-a7ad6b99ef93 req-6eee07b7-e5ca-44bd-bec5-b545c954057f service nova] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Updated VIF entry in instance network info cache for port 942293fd-c866-4331-b9d4-f667536a039b. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1260.825870] env[69992]: DEBUG nova.network.neutron [req-a6252c81-75a8-4551-a453-a7ad6b99ef93 req-6eee07b7-e5ca-44bd-bec5-b545c954057f service nova] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Updating instance_info_cache with network_info: [{"id": "942293fd-c866-4331-b9d4-f667536a039b", "address": "fa:16:3e:3c:2b:0e", "network": {"id": "d02e51d7-72be-4f6c-aa90-0ada60f8d7f7", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1603378574-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.159", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34923aa9da1d46cc9d22d569d9428781", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap942293fd-c8", "ovs_interfaceid": "942293fd-c866-4331-b9d4-f667536a039b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1260.937805] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeb7388e-04f7-43d7-ad09-d5ddb89d7a2d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.949220] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5207db6-d9e4-4341-81ee-6f6aeb715813 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.982823] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7891db95-fb4f-44ee-89d1-d7633e51157c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.990648] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3a65f4-ac6d-4903-b0e2-546d50709082 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.009483] 
env[69992]: DEBUG nova.compute.provider_tree [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1261.239512] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "refresh_cache-1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.239658] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired lock "refresh_cache-1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1261.239819] env[69992]: DEBUG nova.network.neutron [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1261.260011] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897691, 'name': Rename_Task, 'duration_secs': 0.382298} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.260349] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1261.260652] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f00e86b6-542e-4a85-a542-363b832a7ccd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.267241] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1261.267241] env[69992]: value = "task-2897692" [ 1261.267241] env[69992]: _type = "Task" [ 1261.267241] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.275861] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897692, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.291922] env[69992]: DEBUG nova.compute.manager [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Stashing vm_state: active {{(pid=69992) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1261.329211] env[69992]: DEBUG oslo_concurrency.lockutils [req-a6252c81-75a8-4551-a453-a7ad6b99ef93 req-6eee07b7-e5ca-44bd-bec5-b545c954057f service nova] Releasing lock "refresh_cache-57702674-4c96-4577-a93f-24ecffebb3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1261.512794] env[69992]: DEBUG nova.scheduler.client.report [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1261.781349] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897692, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.807250] env[69992]: DEBUG nova.network.neutron [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1261.833452] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.971835] env[69992]: DEBUG nova.compute.manager [req-55fc72cf-f7b8-42a5-976c-680bc780d765 req-470b0b5e-d5e0-4390-b3bd-281acc71fa10 service nova] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Received event network-changed-2c7ae122-41e5-4605-a33e-4516dd1f5945 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1261.972047] env[69992]: DEBUG nova.compute.manager [req-55fc72cf-f7b8-42a5-976c-680bc780d765 req-470b0b5e-d5e0-4390-b3bd-281acc71fa10 service nova] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Refreshing instance network info cache due to event network-changed-2c7ae122-41e5-4605-a33e-4516dd1f5945. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1261.972277] env[69992]: DEBUG oslo_concurrency.lockutils [req-55fc72cf-f7b8-42a5-976c-680bc780d765 req-470b0b5e-d5e0-4390-b3bd-281acc71fa10 service nova] Acquiring lock "refresh_cache-1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.994075] env[69992]: DEBUG nova.network.neutron [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Updating instance_info_cache with network_info: [{"id": "2c7ae122-41e5-4605-a33e-4516dd1f5945", "address": "fa:16:3e:41:be:e4", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c7ae122-41", "ovs_interfaceid": "2c7ae122-41e5-4605-a33e-4516dd1f5945", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.018474] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.984s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1262.020841] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.173s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1262.022993] env[69992]: INFO nova.compute.claims [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1262.049590] env[69992]: INFO nova.scheduler.client.report [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Deleted allocations for instance fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0 [ 1262.285088] env[69992]: DEBUG oslo_vmware.api [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 
tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897692, 'name': PowerOnVM_Task, 'duration_secs': 0.751106} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.286089] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1262.286451] env[69992]: INFO nova.compute.manager [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Took 8.71 seconds to spawn the instance on the hypervisor. [ 1262.286756] env[69992]: DEBUG nova.compute.manager [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1262.288054] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47334d46-120c-4ad7-a4e8-4a4ab1c1ce24 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.497302] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Releasing lock "refresh_cache-1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1262.497659] env[69992]: DEBUG nova.compute.manager [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Instance network_info: |[{"id": "2c7ae122-41e5-4605-a33e-4516dd1f5945", "address": "fa:16:3e:41:be:e4", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c7ae122-41", "ovs_interfaceid": "2c7ae122-41e5-4605-a33e-4516dd1f5945", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1262.497971] env[69992]: DEBUG 
oslo_concurrency.lockutils [req-55fc72cf-f7b8-42a5-976c-680bc780d765 req-470b0b5e-d5e0-4390-b3bd-281acc71fa10 service nova] Acquired lock "refresh_cache-1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1262.498348] env[69992]: DEBUG nova.network.neutron [req-55fc72cf-f7b8-42a5-976c-680bc780d765 req-470b0b5e-d5e0-4390-b3bd-281acc71fa10 service nova] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Refreshing network info cache for port 2c7ae122-41e5-4605-a33e-4516dd1f5945 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1262.499386] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:be:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f6d1427-d86b-4371-9172-50e4bb0eb1cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c7ae122-41e5-4605-a33e-4516dd1f5945', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1262.508663] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1262.509760] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1262.509973] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ed8143b-c0ea-4101-b6a7-b49dd6bcd76b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.538131] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1262.538131] env[69992]: value = "task-2897693" [ 1262.538131] env[69992]: _type = "Task" [ 1262.538131] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.546036] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897693, 'name': CreateVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.565612] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6ce9cfd5-ec88-470c-95c0-289d4ec88d1e tempest-ServersV294TestFqdnHostnames-1691937918 tempest-ServersV294TestFqdnHostnames-1691937918-project-member] Lock "fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.996s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1262.811042] env[69992]: INFO nova.compute.manager [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Took 46.49 seconds to build instance. 
[ 1263.053887] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897693, 'name': CreateVM_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.312925] env[69992]: DEBUG oslo_concurrency.lockutils [None req-21cfcf52-a023-45bc-9ce3-113428d798d9 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "dedba037-48a7-4083-925d-5f34e2a27362" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1263.356028] env[69992]: DEBUG nova.network.neutron [req-55fc72cf-f7b8-42a5-976c-680bc780d765 req-470b0b5e-d5e0-4390-b3bd-281acc71fa10 service nova] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Updated VIF entry in instance network info cache for port 2c7ae122-41e5-4605-a33e-4516dd1f5945. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1263.357445] env[69992]: DEBUG nova.network.neutron [req-55fc72cf-f7b8-42a5-976c-680bc780d765 req-470b0b5e-d5e0-4390-b3bd-281acc71fa10 service nova] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Updating instance_info_cache with network_info: [{"id": "2c7ae122-41e5-4605-a33e-4516dd1f5945", "address": "fa:16:3e:41:be:e4", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c7ae122-41", "ovs_interfaceid": "2c7ae122-41e5-4605-a33e-4516dd1f5945", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1263.414419] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d359c9a-6103-4411-b83c-edee6ee7b2d0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.426501] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe26365-ad87-4370-b7fc-a20553b828fc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.460233] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c05ef5-dfad-46b4-8b70-bb82456d92a3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.469166] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3ec049a4-cf2f-455e-a829-b60878f283b4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.487484] env[69992]: DEBUG nova.compute.provider_tree [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1263.553404] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897693, 'name': CreateVM_Task, 'duration_secs': 0.744927} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.556541] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1263.556541] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1263.556541] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1263.556541] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1263.556541] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b176227-3129-4bff-9c13-fc798fbb1877 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.564610] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1263.564610] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52548319-8faf-a77c-81aa-5298614f0f96" [ 1263.564610] env[69992]: _type = "Task" [ 1263.564610] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.571727] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52548319-8faf-a77c-81aa-5298614f0f96, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.760049] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a08d7e9f-9a84-4c81-bd44-2ed1c4dcd4af tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "dedba037-48a7-4083-925d-5f34e2a27362" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1263.760049] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a08d7e9f-9a84-4c81-bd44-2ed1c4dcd4af tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "dedba037-48a7-4083-925d-5f34e2a27362" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1263.760049] env[69992]: DEBUG nova.compute.manager [None req-a08d7e9f-9a84-4c81-bd44-2ed1c4dcd4af tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1263.760049] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af31451-7d10-4a41-8e24-ed92e0e0ddcc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.767981] env[69992]: DEBUG nova.compute.manager [None req-a08d7e9f-9a84-4c81-bd44-2ed1c4dcd4af tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69992) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1263.770030] env[69992]: DEBUG nova.objects.instance [None req-a08d7e9f-9a84-4c81-bd44-2ed1c4dcd4af tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lazy-loading 'flavor' on Instance uuid dedba037-48a7-4083-925d-5f34e2a27362 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1263.863482] env[69992]: DEBUG oslo_concurrency.lockutils [req-55fc72cf-f7b8-42a5-976c-680bc780d765 req-470b0b5e-d5e0-4390-b3bd-281acc71fa10 service nova] Releasing lock "refresh_cache-1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1263.992623] env[69992]: DEBUG nova.scheduler.client.report [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1264.073676] env[69992]: DEBUG oslo_vmware.api [None 
req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52548319-8faf-a77c-81aa-5298614f0f96, 'name': SearchDatastore_Task, 'duration_secs': 0.010337} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.073988] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1264.074239] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1264.074479] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1264.074625] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1264.074803] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1264.075079] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c4445ccf-23a1-4ec4-88b1-849653d8f895 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.085080] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1264.085263] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1264.085964] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ad4d102-ed57-43fc-b8d1-ab57ad5b5634 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.091194] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1264.091194] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52284023-68f5-b635-2d98-88793a34bdf3" [ 1264.091194] env[69992]: _type = "Task" [ 1264.091194] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.100353] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52284023-68f5-b635-2d98-88793a34bdf3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.497617] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.477s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1264.498208] env[69992]: DEBUG nova.compute.manager [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1264.501085] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.642s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1264.501366] env[69992]: DEBUG nova.objects.instance [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Lazy-loading 'resources' on Instance uuid 451a8af1-a4a2-4c2d-932c-58955491433b {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1264.602257] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52284023-68f5-b635-2d98-88793a34bdf3, 'name': SearchDatastore_Task, 'duration_secs': 0.010071} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.603120] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e87fad3-dc58-4c26-a2b6-7363908d7919 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.608417] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1264.608417] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]525ef87d-f6a9-40d3-a69b-6b31cf7ebe51" [ 1264.608417] env[69992]: _type = "Task" [ 1264.608417] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.619211] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525ef87d-f6a9-40d3-a69b-6b31cf7ebe51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.782394] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a08d7e9f-9a84-4c81-bd44-2ed1c4dcd4af tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1264.782694] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-884d4104-e820-4537-92a7-66e38fd8d29d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.788938] env[69992]: DEBUG oslo_vmware.api [None req-a08d7e9f-9a84-4c81-bd44-2ed1c4dcd4af tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1264.788938] env[69992]: value = "task-2897694" [ 1264.788938] env[69992]: _type = "Task" [ 1264.788938] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.799717] env[69992]: DEBUG oslo_vmware.api [None req-a08d7e9f-9a84-4c81-bd44-2ed1c4dcd4af tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897694, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.006054] env[69992]: DEBUG nova.compute.utils [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1265.009856] env[69992]: DEBUG nova.compute.manager [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1265.010204] env[69992]: DEBUG nova.network.neutron [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1265.069452] env[69992]: DEBUG nova.policy [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dc6792edfe6245d2ba77a14aba041ca0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '658cab8ee4194f7f98dd07de450f248b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1265.123791] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525ef87d-f6a9-40d3-a69b-6b31cf7ebe51, 'name': SearchDatastore_Task, 'duration_secs': 0.012622} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.124266] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1265.124695] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe/1f9f3bdf-c806-4ac9-85f3-6b33b983fafe.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1265.125157] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-96945efe-3c32-45e9-982e-2cb41ec05d36 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.134038] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1265.134038] env[69992]: value = "task-2897695" [ 1265.134038] env[69992]: _type = "Task" [ 1265.134038] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.141932] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897695, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.303790] env[69992]: DEBUG oslo_vmware.api [None req-a08d7e9f-9a84-4c81-bd44-2ed1c4dcd4af tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897694, 'name': PowerOffVM_Task, 'duration_secs': 0.211442} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.303790] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a08d7e9f-9a84-4c81-bd44-2ed1c4dcd4af tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1265.306949] env[69992]: DEBUG nova.compute.manager [None req-a08d7e9f-9a84-4c81-bd44-2ed1c4dcd4af tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1265.306949] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354ea527-0644-4f80-971a-1e81545e3276 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.383122] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60530b2b-3582-47c8-b8f9-581e86e4cd47 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.395913] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b7eb51b-f62f-42d3-8d58-b24fac3e7176 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.430687] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b1fa02d-c251-4707-83f6-e7afe0141d9a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.439770] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073d3792-1b9a-405d-a547-09372c0fae59 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.458553] env[69992]: DEBUG nova.compute.provider_tree [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1265.462783] env[69992]: DEBUG nova.network.neutron [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: 
c6e4f19b-7264-4eea-a472-f64a68d4df22] Successfully created port: 087e09d7-8b3f-4d66-9008-d97120de9f30 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1265.510864] env[69992]: DEBUG nova.compute.manager [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1265.643434] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897695, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.825937] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a08d7e9f-9a84-4c81-bd44-2ed1c4dcd4af tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "dedba037-48a7-4083-925d-5f34e2a27362" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.068s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.965031] env[69992]: DEBUG nova.scheduler.client.report [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1266.144110] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897695, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530969} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.144417] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe/1f9f3bdf-c806-4ac9-85f3-6b33b983fafe.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1266.144596] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1266.144838] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-151b650f-e44f-4116-9ca8-6c89fafb705d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.152214] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1266.152214] env[69992]: value = "task-2897696" [ 1266.152214] env[69992]: _type = "Task" [ 1266.152214] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.162671] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897696, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.396617] env[69992]: INFO nova.compute.manager [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Rebuilding instance [ 1266.444800] env[69992]: DEBUG nova.compute.manager [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1266.445690] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3095283d-1468-4688-9497-155e6eeadf3c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.472327] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.971s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1266.475605] env[69992]: DEBUG oslo_concurrency.lockutils [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.206s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1266.477226] env[69992]: INFO nova.compute.claims [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1266.503318] env[69992]: INFO nova.scheduler.client.report [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Deleted allocations for instance 451a8af1-a4a2-4c2d-932c-58955491433b [ 1266.520560] env[69992]: DEBUG nova.compute.manager [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1266.547092] env[69992]: DEBUG nova.virt.hardware [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1266.547377] env[69992]: DEBUG nova.virt.hardware [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1266.547552] env[69992]: DEBUG nova.virt.hardware [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1266.547752] env[69992]: DEBUG nova.virt.hardware [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1266.547902] env[69992]: DEBUG nova.virt.hardware [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1266.548206] env[69992]: DEBUG nova.virt.hardware [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1266.549087] env[69992]: DEBUG nova.virt.hardware [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1266.549087] env[69992]: DEBUG nova.virt.hardware [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1266.549087] env[69992]: DEBUG nova.virt.hardware [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 
tempest-ImagesTestJSON-1158997241-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1266.549087] env[69992]: DEBUG nova.virt.hardware [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1266.549319] env[69992]: DEBUG nova.virt.hardware [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1266.550502] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d78171bd-38db-428b-8171-e80c97a0221a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.559612] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5943f3-8786-4d3c-8756-a4207157b7b6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.662365] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897696, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078415} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.662365] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1266.662934] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d18eae3f-087b-41bd-b51a-3d7dea5d146a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.685082] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe/1f9f3bdf-c806-4ac9-85f3-6b33b983fafe.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1266.685474] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1741032-47e1-480b-8681-36bf1b04de67 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.705868] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1266.705868] env[69992]: value = "task-2897697" [ 1266.705868] env[69992]: _type = "Task" 
[ 1266.705868] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.718633] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897697, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.017104] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d01061f6-3d78-49bb-aa31-6493c64f9267 tempest-ServersListShow296Test-370241449 tempest-ServersListShow296Test-370241449-project-member] Lock "451a8af1-a4a2-4c2d-932c-58955491433b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.289s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1267.038100] env[69992]: DEBUG nova.compute.manager [req-6db2ffd9-8b38-47f8-b86c-940768b4207a req-f59b6984-ecf0-442c-b500-a85e731d9ffd service nova] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Received event network-vif-plugged-087e09d7-8b3f-4d66-9008-d97120de9f30 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1267.038340] env[69992]: DEBUG oslo_concurrency.lockutils [req-6db2ffd9-8b38-47f8-b86c-940768b4207a req-f59b6984-ecf0-442c-b500-a85e731d9ffd service nova] Acquiring lock "c6e4f19b-7264-4eea-a472-f64a68d4df22-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1267.038587] env[69992]: DEBUG oslo_concurrency.lockutils [req-6db2ffd9-8b38-47f8-b86c-940768b4207a req-f59b6984-ecf0-442c-b500-a85e731d9ffd service nova] Lock "c6e4f19b-7264-4eea-a472-f64a68d4df22-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1267.038782] env[69992]: DEBUG oslo_concurrency.lockutils [req-6db2ffd9-8b38-47f8-b86c-940768b4207a req-f59b6984-ecf0-442c-b500-a85e731d9ffd service nova] Lock "c6e4f19b-7264-4eea-a472-f64a68d4df22-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1267.038974] env[69992]: DEBUG nova.compute.manager [req-6db2ffd9-8b38-47f8-b86c-940768b4207a req-f59b6984-ecf0-442c-b500-a85e731d9ffd service nova] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] No waiting events found dispatching network-vif-plugged-087e09d7-8b3f-4d66-9008-d97120de9f30 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1267.039168] env[69992]: WARNING nova.compute.manager [req-6db2ffd9-8b38-47f8-b86c-940768b4207a req-f59b6984-ecf0-442c-b500-a85e731d9ffd service nova] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Received unexpected event network-vif-plugged-087e09d7-8b3f-4d66-9008-d97120de9f30 for instance with vm_state building and task_state spawning. [ 1267.217525] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897697, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.231679] env[69992]: DEBUG nova.network.neutron [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Successfully updated port: 087e09d7-8b3f-4d66-9008-d97120de9f30 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1267.465071] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1267.465071] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8a4e164-425c-4ea2-b692-7ec369c0165c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.472803] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1267.472803] env[69992]: value = "task-2897698" [ 1267.472803] env[69992]: _type = "Task" [ 1267.472803] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.482966] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] VM already powered off {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1267.483362] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1267.484223] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4862cc3-2ea9-4eda-adaa-e9f8c6408a71 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.495447] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1267.495447] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b762f45b-b293-4d02-852c-2a7135203a52 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.576569] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1267.576569] 
env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1267.576569] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Deleting the datastore file [datastore1] dedba037-48a7-4083-925d-5f34e2a27362 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1267.576829] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe40c55a-ad65-4be5-a53a-0f5b53d9b525 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.582847] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1267.582847] env[69992]: value = "task-2897700" [ 1267.582847] env[69992]: _type = "Task" [ 1267.582847] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.600945] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897700, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.726833] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897697, 'name': ReconfigVM_Task, 'duration_secs': 0.992807} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.727420] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Reconfigured VM instance instance-00000055 to attach disk [datastore2] 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe/1f9f3bdf-c806-4ac9-85f3-6b33b983fafe.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1267.728339] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-db852056-d35b-4989-a546-5b8204b9391d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.734456] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1267.734456] env[69992]: value = "task-2897701" [ 1267.734456] env[69992]: _type = "Task" [ 1267.734456] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.738277] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "refresh_cache-c6e4f19b-7264-4eea-a472-f64a68d4df22" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1267.738277] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquired lock "refresh_cache-c6e4f19b-7264-4eea-a472-f64a68d4df22" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1267.738412] env[69992]: DEBUG nova.network.neutron [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1267.746514] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897701, 'name': Rename_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.875823] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-577e1b5c-d3e1-45c0-af03-69d269d74016 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.884520] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d70e40e-92aa-475c-8086-066fd924f708 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.926771] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18020ec4-49ff-40ed-8ab3-dcc3b258bda5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.936637] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f64fb9e-0153-434f-b7e3-00ef3aae9924 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.957099] env[69992]: DEBUG nova.compute.provider_tree [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1268.093820] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897700, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17319} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.097278] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1268.097530] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1268.097720] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1268.248273] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897701, 'name': Rename_Task, 'duration_secs': 0.166281} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.248906] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1268.251084] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f5ebceb8-554a-4178-9880-b2e1acf06068 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.263015] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1268.263015] env[69992]: value = "task-2897702" [ 1268.263015] env[69992]: _type = "Task" [ 1268.263015] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.278487] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897702, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.305886] env[69992]: DEBUG nova.network.neutron [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1268.462883] env[69992]: DEBUG nova.scheduler.client.report [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1268.481161] env[69992]: DEBUG nova.network.neutron [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Updating instance_info_cache with network_info: [{"id": "087e09d7-8b3f-4d66-9008-d97120de9f30", "address": "fa:16:3e:be:f5:54", "network": {"id": "bea180e9-720e-4be5-bb1d-8aa1243cfe3f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-67313604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "658cab8ee4194f7f98dd07de450f248b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap087e09d7-8b", "ovs_interfaceid": "087e09d7-8b3f-4d66-9008-d97120de9f30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1268.773852] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897702, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.970691] env[69992]: DEBUG oslo_concurrency.lockutils [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.496s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1268.971582] env[69992]: DEBUG nova.compute.manager [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1268.974335] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.682s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1268.974551] env[69992]: DEBUG nova.objects.instance [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lazy-loading 'resources' on Instance uuid 546fb923-4574-4407-8625-69e6c4d8d35e {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1268.984580] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Releasing lock "refresh_cache-c6e4f19b-7264-4eea-a472-f64a68d4df22" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1268.984858] env[69992]: DEBUG nova.compute.manager [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Instance network_info: |[{"id": "087e09d7-8b3f-4d66-9008-d97120de9f30", "address": "fa:16:3e:be:f5:54", "network": {"id": "bea180e9-720e-4be5-bb1d-8aa1243cfe3f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-67313604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "658cab8ee4194f7f98dd07de450f248b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap087e09d7-8b", "ovs_interfaceid": "087e09d7-8b3f-4d66-9008-d97120de9f30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1268.985290] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:f5:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4406a73e-2189-46ac-9e96-4f0af80b5094', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '087e09d7-8b3f-4d66-9008-d97120de9f30', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1268.993007] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 
tempest-ImagesTestJSON-1158997241-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1268.993983] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1268.994712] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-629499a5-4b35-4d83-8b2a-38043159d003 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.019186] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1269.019186] env[69992]: value = "task-2897703" [ 1269.019186] env[69992]: _type = "Task" [ 1269.019186] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.027775] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897703, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.139166] env[69992]: DEBUG nova.virt.hardware [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1269.139515] env[69992]: DEBUG nova.virt.hardware [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1269.139846] env[69992]: DEBUG nova.virt.hardware [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1269.140082] env[69992]: DEBUG nova.virt.hardware [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1269.140240] env[69992]: DEBUG nova.virt.hardware [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1269.140464] env[69992]: DEBUG nova.virt.hardware [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1269.140686] env[69992]: DEBUG nova.virt.hardware [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1269.140847] env[69992]: DEBUG nova.virt.hardware [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1269.141025] env[69992]: DEBUG nova.virt.hardware [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1269.141197] env[69992]: DEBUG nova.virt.hardware [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1269.141377] env[69992]: DEBUG nova.virt.hardware [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1269.142245] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e56a929d-1eae-40f3-9c67-694291f4bb4e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.150073] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a33685-40c1-4d31-acef-9481a91a6f2b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.164150] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:7a:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46785c9c-8b22-487d-a854-b3e67c5ed1d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0cb0f0fd-a197-4688-a99f-231754ad8820', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1269.172103] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-45b1180e-2f79-4934-867a-792026a5b5e0 
tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1269.172387] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1269.172633] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8005e80a-3ac5-42ab-8a65-7da52caa0c97 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.193144] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1269.193144] env[69992]: value = "task-2897704" [ 1269.193144] env[69992]: _type = "Task" [ 1269.193144] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.203467] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897704, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.272804] env[69992]: DEBUG oslo_vmware.api [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897702, 'name': PowerOnVM_Task, 'duration_secs': 0.549982} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.273077] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1269.273335] env[69992]: INFO nova.compute.manager [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Took 10.21 seconds to spawn the instance on the hypervisor. 
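Note on the recurring "Waiting for the task ... to complete" / "_poll_task ... progress is N%" entries above: they come from oslo.vmware's generic task-polling helper, which the VMware driver uses for every vCenter task in this trace (Rename_Task, PowerOnVM_Task, CreateVM_Task, SearchDatastore_Task, and so on). A minimal sketch of that pattern follows; the connection parameters and the vm_ref value are placeholders, not values taken from this log.

    # Sketch of the oslo.vmware task-polling pattern visible above (illustrative only).
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vcenter.example.com',        # placeholder vCenter endpoint
        'user@example.com',           # placeholder credentials
        'secret',
        api_retry_count=10,           # retries on transient API faults
        task_poll_interval=0.5)       # seconds between the progress polls logged by _poll_task

    vm_ref = ...                      # placeholder: VirtualMachine managed-object reference

    # invoke_api() starts the vCenter task (here a power-on); wait_for_task()
    # then polls it, logging progress, until it reports success or raises on
    # an error state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)
    assert task_info.state == 'success'
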
[ 1269.274057] env[69992]: DEBUG nova.compute.manager [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1269.274394] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc1a8d8-0987-4531-a4c8-7940e5824641 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.435753] env[69992]: DEBUG nova.compute.manager [req-0b51f415-96f4-4b6d-80f9-7ec8c1007257 req-5b58c940-4627-48a0-822e-4242dc0fa3ea service nova] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Received event network-changed-087e09d7-8b3f-4d66-9008-d97120de9f30 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1269.436542] env[69992]: DEBUG nova.compute.manager [req-0b51f415-96f4-4b6d-80f9-7ec8c1007257 req-5b58c940-4627-48a0-822e-4242dc0fa3ea service nova] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Refreshing instance network info cache due to event network-changed-087e09d7-8b3f-4d66-9008-d97120de9f30. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1269.436905] env[69992]: DEBUG oslo_concurrency.lockutils [req-0b51f415-96f4-4b6d-80f9-7ec8c1007257 req-5b58c940-4627-48a0-822e-4242dc0fa3ea service nova] Acquiring lock "refresh_cache-c6e4f19b-7264-4eea-a472-f64a68d4df22" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1269.437206] env[69992]: DEBUG oslo_concurrency.lockutils [req-0b51f415-96f4-4b6d-80f9-7ec8c1007257 req-5b58c940-4627-48a0-822e-4242dc0fa3ea service nova] Acquired lock "refresh_cache-c6e4f19b-7264-4eea-a472-f64a68d4df22" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1269.437508] env[69992]: DEBUG nova.network.neutron [req-0b51f415-96f4-4b6d-80f9-7ec8c1007257 req-5b58c940-4627-48a0-822e-4242dc0fa3ea service nova] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Refreshing network info cache for port 087e09d7-8b3f-4d66-9008-d97120de9f30 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1269.478405] env[69992]: DEBUG nova.compute.utils [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1269.479812] env[69992]: DEBUG nova.compute.manager [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1269.480561] env[69992]: DEBUG nova.network.neutron [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1269.531880] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897703, 'name': CreateVM_Task, 'duration_secs': 0.352759} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.537756] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1269.537756] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1269.537756] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1269.537756] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1269.537756] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65c4eaed-9333-4e32-a0ba-519a48e74f26 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.541875] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1269.541875] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]521ceb39-9ef9-9221-831d-9909ef22d7b8" [ 1269.541875] env[69992]: _type = "Task" [ 1269.541875] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.546801] env[69992]: DEBUG nova.policy [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94f19c179a3545089bcc66b7e5dc36e5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4da04b8933ad4d2ba4b1c193853f31b2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1269.553984] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521ceb39-9ef9-9221-831d-9909ef22d7b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.703937] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897704, 'name': CreateVM_Task, 'duration_secs': 0.40995} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.704776] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1269.704900] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1269.800776] env[69992]: INFO nova.compute.manager [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Took 47.30 seconds to build instance. 
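Similarly, the Acquiring lock / Acquired lock ("waited N s") / Releasing lock triplets and the Lock "compute_resources" acquired-by/held entries are emitted by oslo.concurrency's lockutils, which Nova uses to serialize access to shared state such as an instance's network-info cache, the datastore image cache, and the resource tracker. A minimal sketch of the two usual forms, with illustrative lock names and placeholder bodies:

    # Sketch of the oslo.concurrency locking pattern behind the lock messages above.
    from oslo_concurrency import lockutils

    def refresh_instance_network_cache():
        """Placeholder for the work guarded by the lock (not Nova's real code)."""

    # Context-manager form: an Acquiring/Acquired/Releasing triplet is logged
    # around the body of a block like this.
    with lockutils.lock('refresh_cache-<instance-uuid>'):
        refresh_instance_network_cache()

    # Decorator form: the style of serialization reported for the
    # "compute_resources" lock (the waited/held timings in the log).
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        """Placeholder for claim/update logic."""
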
[ 1269.828872] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86265721-e457-48ef-b84a-07936f382c82 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.838619] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f34b5c5-1797-43e7-845c-ad007be6b7a3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.874821] env[69992]: DEBUG nova.network.neutron [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Successfully created port: e6b569b5-e098-47d5-80c8-8c95df681396 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1269.880028] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90cedb46-8015-4fcb-8a26-c5727c4b12ea {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.888034] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25ec241c-affe-40cb-98bc-29303e34b257 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.905189] env[69992]: DEBUG nova.compute.provider_tree [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1269.920941] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1269.921350] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1269.984178] env[69992]: DEBUG nova.compute.manager [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1270.058390] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521ceb39-9ef9-9221-831d-9909ef22d7b8, 'name': SearchDatastore_Task, 'duration_secs': 0.030732} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.059050] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1270.059470] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1270.059733] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.059883] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1270.060080] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1270.060402] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1270.060719] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1270.060959] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0adbc305-db21-40da-a7de-609a08c7c5be {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.063267] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-370aedaf-d821-49c2-a536-5fe4332ceba6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.069479] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 
tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1270.069479] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52285c73-ef3a-b8fa-0b38-dd96dffe10b9" [ 1270.069479] env[69992]: _type = "Task" [ 1270.069479] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.076732] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1270.076927] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1270.078524] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b71e6c2-e5f5-4b18-96f1-e66268d562f3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.084564] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52285c73-ef3a-b8fa-0b38-dd96dffe10b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.087791] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1270.087791] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d47404-f8da-3ded-18b4-ff5dbc9a69d1" [ 1270.087791] env[69992]: _type = "Task" [ 1270.087791] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.095976] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d47404-f8da-3ded-18b4-ff5dbc9a69d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.304756] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1ff7c1f-b11a-4214-8a34-9190ca321f5b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.812s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1270.326269] env[69992]: DEBUG nova.network.neutron [req-0b51f415-96f4-4b6d-80f9-7ec8c1007257 req-5b58c940-4627-48a0-822e-4242dc0fa3ea service nova] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Updated VIF entry in instance network info cache for port 087e09d7-8b3f-4d66-9008-d97120de9f30. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1270.326677] env[69992]: DEBUG nova.network.neutron [req-0b51f415-96f4-4b6d-80f9-7ec8c1007257 req-5b58c940-4627-48a0-822e-4242dc0fa3ea service nova] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Updating instance_info_cache with network_info: [{"id": "087e09d7-8b3f-4d66-9008-d97120de9f30", "address": "fa:16:3e:be:f5:54", "network": {"id": "bea180e9-720e-4be5-bb1d-8aa1243cfe3f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-67313604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "658cab8ee4194f7f98dd07de450f248b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap087e09d7-8b", "ovs_interfaceid": "087e09d7-8b3f-4d66-9008-d97120de9f30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1270.412260] env[69992]: DEBUG nova.scheduler.client.report [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1270.428726] env[69992]: DEBUG nova.compute.manager [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1270.583231] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52285c73-ef3a-b8fa-0b38-dd96dffe10b9, 'name': SearchDatastore_Task, 'duration_secs': 0.019697} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.583903] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1270.583903] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1270.584291] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.597176] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d47404-f8da-3ded-18b4-ff5dbc9a69d1, 'name': SearchDatastore_Task, 'duration_secs': 0.023604} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.597968] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a94ce321-61b0-4d6c-a069-fe7b63eb5b8c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.603876] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1270.603876] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]520933eb-0386-cf43-ae79-3b3da5ead9bc" [ 1270.603876] env[69992]: _type = "Task" [ 1270.603876] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.611783] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]520933eb-0386-cf43-ae79-3b3da5ead9bc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.829559] env[69992]: DEBUG oslo_concurrency.lockutils [req-0b51f415-96f4-4b6d-80f9-7ec8c1007257 req-5b58c940-4627-48a0-822e-4242dc0fa3ea service nova] Releasing lock "refresh_cache-c6e4f19b-7264-4eea-a472-f64a68d4df22" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1270.918365] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.944s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1270.926021] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.789s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1270.926021] env[69992]: DEBUG nova.objects.instance [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Lazy-loading 'resources' on Instance uuid a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1270.947970] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1270.956400] env[69992]: INFO nova.scheduler.client.report [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Deleted allocations for instance 546fb923-4574-4407-8625-69e6c4d8d35e [ 1270.998196] env[69992]: DEBUG nova.compute.manager [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1271.037866] env[69992]: DEBUG nova.virt.hardware [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1271.038034] env[69992]: DEBUG nova.virt.hardware [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1271.038082] env[69992]: DEBUG nova.virt.hardware [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1271.038313] env[69992]: DEBUG nova.virt.hardware [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1271.038413] env[69992]: DEBUG nova.virt.hardware [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1271.038576] env[69992]: DEBUG nova.virt.hardware [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1271.038834] env[69992]: DEBUG nova.virt.hardware [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1271.038909] env[69992]: DEBUG nova.virt.hardware [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1271.039447] env[69992]: DEBUG 
nova.virt.hardware [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1271.039447] env[69992]: DEBUG nova.virt.hardware [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1271.039676] env[69992]: DEBUG nova.virt.hardware [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1271.040980] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-460d9055-4cc0-49cf-98a8-fb400b197c44 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.051057] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c204bfa2-16a2-41ec-a1d1-59daccdf2bf2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.115940] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]520933eb-0386-cf43-ae79-3b3da5ead9bc, 'name': SearchDatastore_Task, 'duration_secs': 0.037302} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.116578] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1271.116880] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] c6e4f19b-7264-4eea-a472-f64a68d4df22/c6e4f19b-7264-4eea-a472-f64a68d4df22.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1271.117186] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1271.117378] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1271.117597] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-62f9c9d5-aa53-4f67-8a15-28625c8017b3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.120244] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70476646-5baa-452e-8593-4417c7b8a901 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.129101] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1271.129101] env[69992]: value = "task-2897705" [ 1271.129101] env[69992]: _type = "Task" [ 1271.129101] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.132684] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1271.132684] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1271.133558] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75a345b7-a3d1-41d1-86bf-52c0013e84e5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.139496] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897705, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.143020] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1271.143020] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52ce4584-9782-26db-825d-f81bf909f87c" [ 1271.143020] env[69992]: _type = "Task" [ 1271.143020] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.151256] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ce4584-9782-26db-825d-f81bf909f87c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.467814] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d5985779-4f93-4aef-b484-793c665c4e61 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "546fb923-4574-4407-8625-69e6c4d8d35e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.138s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1271.482627] env[69992]: DEBUG nova.compute.manager [req-101d637b-f91c-4cf1-acea-b9425d7ac670 req-360a71c8-2818-4905-8d3f-ea2cc5211f13 service nova] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Received event network-changed-2c7ae122-41e5-4605-a33e-4516dd1f5945 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1271.483153] env[69992]: DEBUG nova.compute.manager [req-101d637b-f91c-4cf1-acea-b9425d7ac670 req-360a71c8-2818-4905-8d3f-ea2cc5211f13 service nova] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Refreshing instance network info cache due to event network-changed-2c7ae122-41e5-4605-a33e-4516dd1f5945. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1271.484130] env[69992]: DEBUG oslo_concurrency.lockutils [req-101d637b-f91c-4cf1-acea-b9425d7ac670 req-360a71c8-2818-4905-8d3f-ea2cc5211f13 service nova] Acquiring lock "refresh_cache-1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1271.484130] env[69992]: DEBUG oslo_concurrency.lockutils [req-101d637b-f91c-4cf1-acea-b9425d7ac670 req-360a71c8-2818-4905-8d3f-ea2cc5211f13 service nova] Acquired lock "refresh_cache-1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1271.484130] env[69992]: DEBUG nova.network.neutron [req-101d637b-f91c-4cf1-acea-b9425d7ac670 req-360a71c8-2818-4905-8d3f-ea2cc5211f13 service nova] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Refreshing network info cache for port 2c7ae122-41e5-4605-a33e-4516dd1f5945 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1271.642165] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897705, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.658775] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ce4584-9782-26db-825d-f81bf909f87c, 'name': SearchDatastore_Task, 'duration_secs': 0.01289} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.659630] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68128fe7-edc1-413a-8b9f-8c5e7cf519e5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.669936] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1271.669936] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52508e03-0dde-ddb2-ee9c-c36e4d8c7d07" [ 1271.669936] env[69992]: _type = "Task" [ 1271.669936] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.679638] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52508e03-0dde-ddb2-ee9c-c36e4d8c7d07, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.827071] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f89cce0-191f-47db-8e9d-b1212d7e0c4c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.834694] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bce8177-33a8-4edb-ac31-b4872b7dca42 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.870937] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2777353d-7d40-4b40-a802-397150fa081d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.878134] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5577d09a-e096-4434-94a0-806fe8f4cdec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.891841] env[69992]: DEBUG nova.compute.provider_tree [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1271.987751] env[69992]: DEBUG nova.network.neutron [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Successfully updated port: e6b569b5-e098-47d5-80c8-8c95df681396 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1272.143284] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897705, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.6237} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.143994] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] c6e4f19b-7264-4eea-a472-f64a68d4df22/c6e4f19b-7264-4eea-a472-f64a68d4df22.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1272.144189] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1272.144456] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ea1f8f2e-c268-45ed-bfb5-2753cc5f46ae {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.153536] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1272.153536] env[69992]: value = "task-2897706" [ 1272.153536] env[69992]: _type = "Task" [ 1272.153536] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.162274] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897706, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.183609] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52508e03-0dde-ddb2-ee9c-c36e4d8c7d07, 'name': SearchDatastore_Task, 'duration_secs': 0.044181} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.183609] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1272.183609] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] dedba037-48a7-4083-925d-5f34e2a27362/dedba037-48a7-4083-925d-5f34e2a27362.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1272.185101] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-066dcff3-57f7-4165-a5df-bbff5b6edd61 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.191770] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1272.191770] env[69992]: value = "task-2897707" [ 1272.191770] env[69992]: _type = "Task" [ 1272.191770] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.202858] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897707, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.357341] env[69992]: DEBUG nova.network.neutron [req-101d637b-f91c-4cf1-acea-b9425d7ac670 req-360a71c8-2818-4905-8d3f-ea2cc5211f13 service nova] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Updated VIF entry in instance network info cache for port 2c7ae122-41e5-4605-a33e-4516dd1f5945. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1272.357681] env[69992]: DEBUG nova.network.neutron [req-101d637b-f91c-4cf1-acea-b9425d7ac670 req-360a71c8-2818-4905-8d3f-ea2cc5211f13 service nova] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Updating instance_info_cache with network_info: [{"id": "2c7ae122-41e5-4605-a33e-4516dd1f5945", "address": "fa:16:3e:41:be:e4", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c7ae122-41", "ovs_interfaceid": "2c7ae122-41e5-4605-a33e-4516dd1f5945", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1272.396104] env[69992]: DEBUG nova.scheduler.client.report [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1272.493756] env[69992]: DEBUG oslo_concurrency.lockutils [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "refresh_cache-e95e47c2-d82e-4153-8d16-7b65d992e91a" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1272.494029] env[69992]: DEBUG oslo_concurrency.lockutils [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquired lock "refresh_cache-e95e47c2-d82e-4153-8d16-7b65d992e91a" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1272.496173] env[69992]: DEBUG nova.network.neutron [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Building network info cache for instance {{(pid=69992) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1272.663646] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897706, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.255076} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.663919] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1272.665475] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfbfe2ce-4b36-4962-8e5a-2ab1da029ac2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.690324] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] c6e4f19b-7264-4eea-a472-f64a68d4df22/c6e4f19b-7264-4eea-a472-f64a68d4df22.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1272.690811] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fdb02de4-1a35-4420-a304-7b75169c2319 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.714967] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897707, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.716325] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1272.716325] env[69992]: value = "task-2897708" [ 1272.716325] env[69992]: _type = "Task" [ 1272.716325] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.725058] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897708, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.861117] env[69992]: DEBUG oslo_concurrency.lockutils [req-101d637b-f91c-4cf1-acea-b9425d7ac670 req-360a71c8-2818-4905-8d3f-ea2cc5211f13 service nova] Releasing lock "refresh_cache-1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1272.902045] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.980s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1272.905623] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.572s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1272.907540] env[69992]: INFO nova.compute.claims [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1272.933253] env[69992]: INFO nova.scheduler.client.report [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Deleted allocations for instance a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190 [ 1273.039235] env[69992]: DEBUG nova.network.neutron [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1273.218505] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897707, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.988328} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.223382] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] dedba037-48a7-4083-925d-5f34e2a27362/dedba037-48a7-4083-925d-5f34e2a27362.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1273.223571] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1273.223846] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3a11555f-75bb-428b-953a-43ef2cb4b538 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.231819] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897708, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.236661] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1273.236661] env[69992]: value = "task-2897709" [ 1273.236661] env[69992]: _type = "Task" [ 1273.236661] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.245257] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897709, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.299677] env[69992]: DEBUG nova.network.neutron [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Updating instance_info_cache with network_info: [{"id": "e6b569b5-e098-47d5-80c8-8c95df681396", "address": "fa:16:3e:7f:9a:d9", "network": {"id": "7b76ab15-e15a-4e22-ba38-bffeba7c4ff6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1461551189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4da04b8933ad4d2ba4b1c193853f31b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6b569b5-e0", "ovs_interfaceid": "e6b569b5-e098-47d5-80c8-8c95df681396", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1273.445492] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5653cf02-c1f7-45c9-9624-1ef03ba17153 tempest-ServersListShow298Test-1873577797 tempest-ServersListShow298Test-1873577797-project-member] Lock "a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.591s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1273.674510] env[69992]: DEBUG nova.compute.manager [req-0d84e22e-4a07-40ff-95a1-f4fdf8d0d4ee req-40d61911-4c03-444b-9aff-3327cb686997 service nova] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Received event network-vif-plugged-e6b569b5-e098-47d5-80c8-8c95df681396 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1273.674510] env[69992]: DEBUG oslo_concurrency.lockutils [req-0d84e22e-4a07-40ff-95a1-f4fdf8d0d4ee req-40d61911-4c03-444b-9aff-3327cb686997 service nova] Acquiring lock "e95e47c2-d82e-4153-8d16-7b65d992e91a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1273.674847] env[69992]: DEBUG oslo_concurrency.lockutils [req-0d84e22e-4a07-40ff-95a1-f4fdf8d0d4ee req-40d61911-4c03-444b-9aff-3327cb686997 service nova] Lock "e95e47c2-d82e-4153-8d16-7b65d992e91a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1273.674847] env[69992]: DEBUG oslo_concurrency.lockutils [req-0d84e22e-4a07-40ff-95a1-f4fdf8d0d4ee req-40d61911-4c03-444b-9aff-3327cb686997 service nova] Lock "e95e47c2-d82e-4153-8d16-7b65d992e91a-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1273.675066] env[69992]: DEBUG nova.compute.manager [req-0d84e22e-4a07-40ff-95a1-f4fdf8d0d4ee req-40d61911-4c03-444b-9aff-3327cb686997 service nova] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] No waiting events found dispatching network-vif-plugged-e6b569b5-e098-47d5-80c8-8c95df681396 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1273.675252] env[69992]: WARNING nova.compute.manager [req-0d84e22e-4a07-40ff-95a1-f4fdf8d0d4ee req-40d61911-4c03-444b-9aff-3327cb686997 service nova] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Received unexpected event network-vif-plugged-e6b569b5-e098-47d5-80c8-8c95df681396 for instance with vm_state building and task_state spawning. [ 1273.675513] env[69992]: DEBUG nova.compute.manager [req-0d84e22e-4a07-40ff-95a1-f4fdf8d0d4ee req-40d61911-4c03-444b-9aff-3327cb686997 service nova] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Received event network-changed-e6b569b5-e098-47d5-80c8-8c95df681396 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1273.675584] env[69992]: DEBUG nova.compute.manager [req-0d84e22e-4a07-40ff-95a1-f4fdf8d0d4ee req-40d61911-4c03-444b-9aff-3327cb686997 service nova] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Refreshing instance network info cache due to event network-changed-e6b569b5-e098-47d5-80c8-8c95df681396. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1273.675724] env[69992]: DEBUG oslo_concurrency.lockutils [req-0d84e22e-4a07-40ff-95a1-f4fdf8d0d4ee req-40d61911-4c03-444b-9aff-3327cb686997 service nova] Acquiring lock "refresh_cache-e95e47c2-d82e-4153-8d16-7b65d992e91a" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.732782] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897708, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.746795] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897709, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071352} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.747436] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1273.748260] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c6bc8d5-0309-4c07-996d-b8e92608e7f8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.774925] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] dedba037-48a7-4083-925d-5f34e2a27362/dedba037-48a7-4083-925d-5f34e2a27362.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1273.776255] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c1a1759-fdb8-400d-ae89-1d6a86635348 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.802010] env[69992]: DEBUG oslo_concurrency.lockutils [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Releasing lock "refresh_cache-e95e47c2-d82e-4153-8d16-7b65d992e91a" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1273.802344] env[69992]: DEBUG nova.compute.manager [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Instance network_info: |[{"id": "e6b569b5-e098-47d5-80c8-8c95df681396", "address": "fa:16:3e:7f:9a:d9", "network": {"id": "7b76ab15-e15a-4e22-ba38-bffeba7c4ff6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1461551189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4da04b8933ad4d2ba4b1c193853f31b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6b569b5-e0", "ovs_interfaceid": "e6b569b5-e098-47d5-80c8-8c95df681396", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1273.803890] env[69992]: DEBUG oslo_concurrency.lockutils 
[req-0d84e22e-4a07-40ff-95a1-f4fdf8d0d4ee req-40d61911-4c03-444b-9aff-3327cb686997 service nova] Acquired lock "refresh_cache-e95e47c2-d82e-4153-8d16-7b65d992e91a" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1273.804096] env[69992]: DEBUG nova.network.neutron [req-0d84e22e-4a07-40ff-95a1-f4fdf8d0d4ee req-40d61911-4c03-444b-9aff-3327cb686997 service nova] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Refreshing network info cache for port e6b569b5-e098-47d5-80c8-8c95df681396 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1273.805354] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:9a:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7b83383f-ed7a-4efd-aef7-aa8c15649d07', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6b569b5-e098-47d5-80c8-8c95df681396', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1273.814485] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Creating folder: Project (4da04b8933ad4d2ba4b1c193853f31b2). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1273.814835] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1273.814835] env[69992]: value = "task-2897710" [ 1273.814835] env[69992]: _type = "Task" [ 1273.814835] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.816055] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6e4162ff-1634-4dba-8a93-9ca8f4d205ef {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.827390] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897710, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.829583] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Created folder: Project (4da04b8933ad4d2ba4b1c193853f31b2) in parent group-v581821. [ 1273.829583] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Creating folder: Instances. Parent ref: group-v582062. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1273.829583] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2340c20c-9198-4e50-b59e-4b4de52d880f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.837713] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Created folder: Instances in parent group-v582062. [ 1273.837930] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1273.838138] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1273.838340] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6660e89f-0354-46f1-a57f-e32254526c59 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.857956] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1273.857956] env[69992]: value = "task-2897713" [ 1273.857956] env[69992]: _type = "Task" [ 1273.857956] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.866469] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897713, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.232964] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897708, 'name': ReconfigVM_Task, 'duration_secs': 1.482403} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.235604] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Reconfigured VM instance instance-00000056 to attach disk [datastore2] c6e4f19b-7264-4eea-a472-f64a68d4df22/c6e4f19b-7264-4eea-a472-f64a68d4df22.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1274.236552] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aa746066-97b8-45f5-881a-4afdbb49123a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.245466] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1274.245466] env[69992]: value = "task-2897714" [ 1274.245466] env[69992]: _type = "Task" [ 1274.245466] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.254030] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897714, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.297056] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb45e6a-03e5-469f-8553-5d148f13a699 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.305355] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-481a3f11-6ca2-4864-b8db-4698e7ec14c0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.342430] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed0b9ac-a209-4b99-b381-0d9c19e5502c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.351528] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897710, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.354427] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4b53f1-3fa3-421d-8c22-9bc4d39c21ea {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.372062] env[69992]: DEBUG nova.compute.provider_tree [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1274.376433] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897713, 'name': CreateVM_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.622010] env[69992]: DEBUG nova.network.neutron [req-0d84e22e-4a07-40ff-95a1-f4fdf8d0d4ee req-40d61911-4c03-444b-9aff-3327cb686997 service nova] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Updated VIF entry in instance network info cache for port e6b569b5-e098-47d5-80c8-8c95df681396. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1274.622486] env[69992]: DEBUG nova.network.neutron [req-0d84e22e-4a07-40ff-95a1-f4fdf8d0d4ee req-40d61911-4c03-444b-9aff-3327cb686997 service nova] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Updating instance_info_cache with network_info: [{"id": "e6b569b5-e098-47d5-80c8-8c95df681396", "address": "fa:16:3e:7f:9a:d9", "network": {"id": "7b76ab15-e15a-4e22-ba38-bffeba7c4ff6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1461551189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4da04b8933ad4d2ba4b1c193853f31b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6b569b5-e0", "ovs_interfaceid": "e6b569b5-e098-47d5-80c8-8c95df681396", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1274.755374] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897714, 'name': Rename_Task, 'duration_secs': 0.421213} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.756139] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1274.756695] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9c34ca35-e271-4521-a11b-a35b0e4db52f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.766941] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1274.766941] env[69992]: value = "task-2897715" [ 1274.766941] env[69992]: _type = "Task" [ 1274.766941] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.775545] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897715, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.847837] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897710, 'name': ReconfigVM_Task, 'duration_secs': 0.920635} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.849324] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Reconfigured VM instance instance-00000054 to attach disk [datastore2] dedba037-48a7-4083-925d-5f34e2a27362/dedba037-48a7-4083-925d-5f34e2a27362.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1274.849997] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bb2d77f7-c6b5-473f-92bd-9f7fb8086b7e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.858312] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1274.858312] env[69992]: value = "task-2897716" [ 1274.858312] env[69992]: _type = "Task" [ 1274.858312] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.868664] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897713, 'name': CreateVM_Task, 'duration_secs': 0.998094} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.871839] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1274.872147] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897716, 'name': Rename_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.872869] env[69992]: DEBUG oslo_concurrency.lockutils [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1274.873050] env[69992]: DEBUG oslo_concurrency.lockutils [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1274.873368] env[69992]: DEBUG oslo_concurrency.lockutils [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1274.873627] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de15010d-50ef-4b23-8690-ca68174d80ed {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.877876] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1274.877876] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5299e778-753b-884e-12f6-4aaadf77c2dd" [ 1274.877876] env[69992]: _type = "Task" [ 1274.877876] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.881698] env[69992]: DEBUG nova.scheduler.client.report [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1274.890901] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5299e778-753b-884e-12f6-4aaadf77c2dd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.126090] env[69992]: DEBUG oslo_concurrency.lockutils [req-0d84e22e-4a07-40ff-95a1-f4fdf8d0d4ee req-40d61911-4c03-444b-9aff-3327cb686997 service nova] Releasing lock "refresh_cache-e95e47c2-d82e-4153-8d16-7b65d992e91a" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1275.280688] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897715, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.367619] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897716, 'name': Rename_Task, 'duration_secs': 0.169819} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.367619] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1275.367619] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c7503875-997a-497d-9fab-017e229fa6af {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.373701] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1275.373701] env[69992]: value = "task-2897717" [ 1275.373701] env[69992]: _type = "Task" [ 1275.373701] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.385614] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897717, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.386963] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.482s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1275.388363] env[69992]: DEBUG nova.compute.manager [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1275.395190] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.200s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1275.395321] env[69992]: DEBUG nova.objects.instance [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lazy-loading 'resources' on Instance uuid 953c0e0d-3279-444c-b631-6ebbf24e5487 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1275.397012] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5299e778-753b-884e-12f6-4aaadf77c2dd, 'name': SearchDatastore_Task, 'duration_secs': 0.011083} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.397442] env[69992]: DEBUG oslo_concurrency.lockutils [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1275.397680] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1275.397918] env[69992]: DEBUG oslo_concurrency.lockutils [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1275.398715] env[69992]: DEBUG oslo_concurrency.lockutils [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1275.398715] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1275.398715] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b708ad25-28ac-4cd1-b205-15643402cdc5 {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.410467] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1275.410467] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1275.412032] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c33025c9-5ca2-44c8-8d1b-5e62812dae36 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.418077] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1275.418077] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d02092-14b4-cd6a-027c-92ebc92d7f3c" [ 1275.418077] env[69992]: _type = "Task" [ 1275.418077] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.428026] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d02092-14b4-cd6a-027c-92ebc92d7f3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.749586] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Acquiring lock "be28d7a8-6566-45aa-8b4c-08c7eb29864d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1275.750165] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Lock "be28d7a8-6566-45aa-8b4c-08c7eb29864d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1275.784412] env[69992]: DEBUG oslo_vmware.api [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897715, 'name': PowerOnVM_Task, 'duration_secs': 0.605623} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.784412] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1275.784412] env[69992]: INFO nova.compute.manager [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Took 9.26 seconds to spawn the instance on the hypervisor. [ 1275.784412] env[69992]: DEBUG nova.compute.manager [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1275.784412] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b70084d-0547-43ff-afe6-869738f89086 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.888942] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897717, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.892751] env[69992]: DEBUG nova.compute.utils [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1275.898025] env[69992]: DEBUG nova.compute.manager [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1275.898025] env[69992]: DEBUG nova.network.neutron [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1275.932987] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d02092-14b4-cd6a-027c-92ebc92d7f3c, 'name': SearchDatastore_Task, 'duration_secs': 0.011891} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.934217] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f480053-9e4d-41db-8e54-5f711a641778 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.939821] env[69992]: DEBUG nova.policy [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd43d38f16c04db1ba46ae836cbbd971', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f3a2959667e41f1b5868994454b21be', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1275.945817] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1275.945817] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52054f00-5200-f6a8-41da-ebd0b7f94875" [ 1275.945817] env[69992]: _type = "Task" [ 1275.945817] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.955652] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52054f00-5200-f6a8-41da-ebd0b7f94875, 'name': SearchDatastore_Task, 'duration_secs': 0.009485} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.956735] env[69992]: DEBUG oslo_concurrency.lockutils [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1275.956735] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] e95e47c2-d82e-4153-8d16-7b65d992e91a/e95e47c2-d82e-4153-8d16-7b65d992e91a.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1275.956873] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-650923d5-b7d0-44a5-b828-34202e90ea32 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.964147] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1275.964147] env[69992]: value = "task-2897718" [ 1275.964147] env[69992]: _type = "Task" [ 1275.964147] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.973312] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897718, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.225852] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0e593e8-94ea-474b-988d-d2b769f29f97 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.236686] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9f7d2e-a950-408a-bf03-7bba7cbe11c5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.288640] env[69992]: DEBUG nova.compute.manager [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1276.292946] env[69992]: DEBUG oslo_concurrency.lockutils [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Acquiring lock "9591b360-414b-4aa9-94b2-5b9ccb9e7d39" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1276.292946] env[69992]: DEBUG oslo_concurrency.lockutils [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Lock "9591b360-414b-4aa9-94b2-5b9ccb9e7d39" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1276.293149] env[69992]: DEBUG oslo_concurrency.lockutils [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Acquiring lock "9591b360-414b-4aa9-94b2-5b9ccb9e7d39-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1276.293373] env[69992]: DEBUG oslo_concurrency.lockutils [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Lock "9591b360-414b-4aa9-94b2-5b9ccb9e7d39-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1276.293601] env[69992]: DEBUG oslo_concurrency.lockutils [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Lock "9591b360-414b-4aa9-94b2-5b9ccb9e7d39-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1276.303451] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ea0980-2d3e-4d4d-a5c4-948e965ba302 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.307208] env[69992]: INFO nova.compute.manager [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Terminating instance [ 1276.313470] env[69992]: INFO nova.compute.manager [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Took 47.49 seconds to build instance. 
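The lockutils entries above are oslo.concurrency at work: Nova serializes the build/terminate/stop paths on a per-instance lock (the lock name is the instance UUID) and the resource tracker serializes claims on the shared "compute_resources" lock. A minimal sketch of that pattern, using oslo_concurrency.lockutils directly rather than Nova's own wrappers; the UUID is copied from the log and do_build()/claim() are illustrative placeholders, not Nova code:

    # Sketch only: the decorator form emits the 'acquired by ... :: waited' /
    # '"released" by ... :: held' DEBUG lines seen above, while the context-manager
    # form emits the plain Acquiring/Acquired/Releasing lock lines.
    from oslo_concurrency import lockutils

    INSTANCE_UUID = "be28d7a8-6566-45aa-8b4c-08c7eb29864d"  # taken from the log above

    def locked_do_build_and_run_instance(instance_uuid):
        # Per-instance lock: concurrent operations on the same instance queue up here.
        with lockutils.lock(instance_uuid):
            do_build(instance_uuid)

    @lockutils.synchronized("compute_resources")
    def claim():
        # Serialized against other resource-tracker callers, matching the
        # "compute_resources" acquire/release entries above.
        pass

    def do_build(instance_uuid):
        pass  # placeholder for the real build logic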
[ 1276.321548] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de63af29-ff9d-4763-b907-6326e75b2f3d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.338222] env[69992]: DEBUG nova.compute.provider_tree [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1276.345137] env[69992]: DEBUG nova.network.neutron [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Successfully created port: bb64cf0b-3b8e-4225-ba71-1524625e60a7 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1276.391035] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897717, 'name': PowerOnVM_Task, 'duration_secs': 0.761715} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.391035] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1276.391035] env[69992]: DEBUG nova.compute.manager [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1276.391035] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bbbd71b-6879-4bc0-864f-dc16692f3011 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.398452] env[69992]: DEBUG nova.compute.manager [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1276.479628] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897718, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.815660] env[69992]: DEBUG nova.compute.manager [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1276.815957] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1276.816871] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a41531e-7872-4620-a0fc-5c70f6bab913 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.820619] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1276.820992] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d951b117-01e0-4f30-a81e-056fcc18439a tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "c6e4f19b-7264-4eea-a472-f64a68d4df22" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.424s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1276.827057] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1276.827057] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-745db5af-00d9-4cae-b8f1-81623451019f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.833252] env[69992]: DEBUG oslo_vmware.api [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Waiting for the task: (returnval){ [ 1276.833252] env[69992]: value = "task-2897719" [ 1276.833252] env[69992]: _type = "Task" [ 1276.833252] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.841704] env[69992]: DEBUG nova.scheduler.client.report [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1276.844931] env[69992]: DEBUG oslo_vmware.api [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Task: {'id': task-2897719, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.913032] env[69992]: INFO nova.compute.manager [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] bringing vm to original state: 'stopped' [ 1276.980104] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897718, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.659788} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.980104] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] e95e47c2-d82e-4153-8d16-7b65d992e91a/e95e47c2-d82e-4153-8d16-7b65d992e91a.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1276.980104] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1276.980361] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6e5b347a-03a8-4bd5-b648-7dbe5b336f53 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.987747] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1276.987747] env[69992]: value = "task-2897720" [ 1276.987747] env[69992]: _type = "Task" [ 1276.987747] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.996570] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897720, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.343806] env[69992]: DEBUG oslo_vmware.api [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Task: {'id': task-2897719, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.346660] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.952s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1277.349904] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.262s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1277.350605] env[69992]: INFO nova.compute.claims [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1277.378714] env[69992]: INFO nova.scheduler.client.report [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Deleted allocations for instance 953c0e0d-3279-444c-b631-6ebbf24e5487 [ 1277.414570] env[69992]: DEBUG nova.compute.manager [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1277.452187] env[69992]: DEBUG nova.virt.hardware [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1277.452442] env[69992]: DEBUG nova.virt.hardware [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1277.452657] env[69992]: DEBUG nova.virt.hardware [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1277.452867] env[69992]: DEBUG nova.virt.hardware [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1277.454518] env[69992]: DEBUG nova.virt.hardware [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1277.454658] env[69992]: DEBUG nova.virt.hardware [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1277.454892] env[69992]: DEBUG nova.virt.hardware [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1277.455077] env[69992]: DEBUG nova.virt.hardware [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} 
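The nova.virt.hardware entries above and just below show the driver picking a guest CPU topology for the 1-vCPU m1.nano flavor: with no flavor or image limits or preferences (0:0:0), the only sockets:cores:threads combination that covers one vCPU is 1:1:1. A simplified, self-contained sketch of that enumeration follows; it is not the actual nova.virt.hardware code, and the 65536 ceilings simply mirror the "limits were sockets=65536, cores=65536, threads=65536" entry:

    # Simplified sketch: enumerate sockets*cores*threads combinations that exactly
    # cover the vCPU count, as in the topology search logged around this point.
    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append(VirtCPUTopology(sockets, cores, threads))
        return found

    # For 1 vCPU this yields a single candidate, matching the
    # "Got 1 possible topologies ... VirtCPUTopology(cores=1,sockets=1,threads=1)"
    # entry that follows.
    print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]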
[ 1277.456212] env[69992]: DEBUG nova.virt.hardware [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1277.456212] env[69992]: DEBUG nova.virt.hardware [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1277.456212] env[69992]: DEBUG nova.virt.hardware [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1277.456821] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f11d85-755e-4fdf-b4b8-041823ba0fcd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.467314] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b90dba-5157-4ae6-b76b-a2aaf23ad5c0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.497473] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897720, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062882} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.497746] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1277.498538] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0754080-c434-4d4f-b2be-e7c23dc66586 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.520987] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] e95e47c2-d82e-4153-8d16-7b65d992e91a/e95e47c2-d82e-4153-8d16-7b65d992e91a.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1277.521360] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76364164-fc68-4d05-8677-98df8faf588a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.542019] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1277.542019] env[69992]: value = "task-2897721" [ 1277.542019] env[69992]: _type = "Task" [ 1277.542019] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.549078] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897721, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.846677] env[69992]: DEBUG oslo_vmware.api [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Task: {'id': task-2897719, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.889187] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6cd2db00-559e-4924-82bf-15c6b581fd5e tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "953c0e0d-3279-444c-b631-6ebbf24e5487" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.240s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1277.923215] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "dedba037-48a7-4083-925d-5f34e2a27362" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1277.923215] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "dedba037-48a7-4083-925d-5f34e2a27362" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1277.923394] env[69992]: DEBUG nova.compute.manager [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1277.924240] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5efd43f6-450f-4942-a077-ffb2823b6d36 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.931831] env[69992]: DEBUG nova.compute.manager [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69992) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1277.966331] env[69992]: DEBUG nova.compute.manager [req-3df1a563-1846-4682-80a4-131d14bf948e req-f7d7c560-a8db-4bc3-980b-d1ef686613fa service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Received event network-vif-plugged-bb64cf0b-3b8e-4225-ba71-1524625e60a7 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1277.966596] env[69992]: DEBUG oslo_concurrency.lockutils [req-3df1a563-1846-4682-80a4-131d14bf948e req-f7d7c560-a8db-4bc3-980b-d1ef686613fa service nova] Acquiring lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1277.966831] env[69992]: DEBUG oslo_concurrency.lockutils [req-3df1a563-1846-4682-80a4-131d14bf948e req-f7d7c560-a8db-4bc3-980b-d1ef686613fa service nova] Lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1277.967245] env[69992]: DEBUG oslo_concurrency.lockutils [req-3df1a563-1846-4682-80a4-131d14bf948e req-f7d7c560-a8db-4bc3-980b-d1ef686613fa service nova] Lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1277.967584] env[69992]: DEBUG nova.compute.manager [req-3df1a563-1846-4682-80a4-131d14bf948e req-f7d7c560-a8db-4bc3-980b-d1ef686613fa service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] No waiting events found dispatching network-vif-plugged-bb64cf0b-3b8e-4225-ba71-1524625e60a7 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1277.967691] env[69992]: WARNING nova.compute.manager [req-3df1a563-1846-4682-80a4-131d14bf948e req-f7d7c560-a8db-4bc3-980b-d1ef686613fa service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Received unexpected event network-vif-plugged-bb64cf0b-3b8e-4225-ba71-1524625e60a7 for instance with vm_state building and task_state spawning. [ 1278.051255] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897721, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.103032] env[69992]: DEBUG nova.network.neutron [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Successfully updated port: bb64cf0b-3b8e-4225-ba71-1524625e60a7 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1278.350142] env[69992]: DEBUG oslo_vmware.api [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Task: {'id': task-2897719, 'name': PowerOffVM_Task, 'duration_secs': 1.030144} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.350142] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1278.350142] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1278.350142] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dfcdb78b-a9fc-48eb-bb16-76586ace0f4c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.430630] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1278.430886] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1278.431099] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Deleting the datastore file [datastore2] 9591b360-414b-4aa9-94b2-5b9ccb9e7d39 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1278.431379] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-99a97199-c25e-4690-ab05-3f816ba38a31 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.436361] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1278.438977] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-469ab7f4-5abc-4e5d-95a0-1340603fcbe4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.439892] env[69992]: DEBUG oslo_vmware.api [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Waiting for the task: (returnval){ [ 1278.439892] env[69992]: value = "task-2897723" [ 1278.439892] env[69992]: _type = "Task" [ 1278.439892] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.448593] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1278.448593] env[69992]: value = "task-2897724" [ 1278.448593] env[69992]: _type = "Task" [ 1278.448593] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.456687] env[69992]: DEBUG oslo_vmware.api [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Task: {'id': task-2897723, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.462503] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897724, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.552952] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897721, 'name': ReconfigVM_Task, 'duration_secs': 0.51448} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.553455] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Reconfigured VM instance instance-00000057 to attach disk [datastore2] e95e47c2-d82e-4153-8d16-7b65d992e91a/e95e47c2-d82e-4153-8d16-7b65d992e91a.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1278.554254] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b4643984-3e5d-49b8-9bf3-8e7b38ffe964 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.567482] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1278.567482] env[69992]: value = "task-2897725" [ 1278.567482] env[69992]: _type = "Task" [ 1278.567482] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.578175] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897725, 'name': Rename_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.606701] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "refresh_cache-08869f38-9609-4f7f-9110-2f26fd1cb3f7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.606900] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquired lock "refresh_cache-08869f38-9609-4f7f-9110-2f26fd1cb3f7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1278.607121] env[69992]: DEBUG nova.network.neutron [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1278.624511] env[69992]: DEBUG nova.compute.manager [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1278.625962] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c391299c-7b74-4459-a10d-d8bfd0339e3d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.731309] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b01bd461-0f76-4dfa-acfd-fd67f286bef2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.740034] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-670f0ff1-8113-4ac3-b2f6-eb10bbb8566b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.777479] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7afdf9-522f-41c3-8955-adf8d93f721e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.786612] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7bafab7-f9e8-46e0-a33e-f98daf557f4a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.805136] env[69992]: DEBUG nova.compute.provider_tree [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1278.950991] env[69992]: DEBUG oslo_vmware.api [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Task: {'id': task-2897723, 'name': 
DeleteDatastoreFile_Task, 'duration_secs': 0.192606} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.954185] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1278.954443] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1278.954561] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1278.954739] env[69992]: INFO nova.compute.manager [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Took 2.14 seconds to destroy the instance on the hypervisor. [ 1278.954985] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1278.955367] env[69992]: DEBUG nova.compute.manager [-] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1278.955367] env[69992]: DEBUG nova.network.neutron [-] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1278.961866] env[69992]: DEBUG oslo_vmware.api [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897724, 'name': PowerOffVM_Task, 'duration_secs': 0.194697} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.962148] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1278.962323] env[69992]: DEBUG nova.compute.manager [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1278.963153] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad35ca92-bcc5-4d95-a0d3-ba8b73161be9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.026149] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1279.026207] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1279.079183] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897725, 'name': Rename_Task, 'duration_secs': 0.221785} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.079765] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1279.079900] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fbcda75c-194a-4412-aab2-22615b9d77ce {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.086708] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1279.086708] env[69992]: value = "task-2897726" [ 1279.086708] env[69992]: _type = "Task" [ 1279.086708] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.097921] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897726, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.144042] env[69992]: INFO nova.compute.manager [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] instance snapshotting [ 1279.147240] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc00983-3e03-4fdf-8d85-294e1b2def03 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.167805] env[69992]: DEBUG nova.network.neutron [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1279.170249] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b57f59-60ab-469e-8e38-7e17f4996099 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.308253] env[69992]: DEBUG nova.scheduler.client.report [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1279.418647] env[69992]: DEBUG nova.network.neutron [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Updating instance_info_cache with network_info: [{"id": "bb64cf0b-3b8e-4225-ba71-1524625e60a7", "address": "fa:16:3e:e0:df:48", "network": {"id": "50514bed-dff8-45a5-83f3-ec0c1ece9611", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1240365716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f3a2959667e41f1b5868994454b21be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": 
"nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb64cf0b-3b", "ovs_interfaceid": "bb64cf0b-3b8e-4225-ba71-1524625e60a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1279.480113] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "dedba037-48a7-4083-925d-5f34e2a27362" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.555s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1279.528699] env[69992]: DEBUG nova.compute.manager [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1279.604641] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897726, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.682903] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Creating Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1279.683271] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-cde186f9-11a9-45f8-b3cc-e2b31e7ac40a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.691781] env[69992]: DEBUG oslo_vmware.api [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1279.691781] env[69992]: value = "task-2897727" [ 1279.691781] env[69992]: _type = "Task" [ 1279.691781] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.701321] env[69992]: DEBUG oslo_vmware.api [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897727, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.814115] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.465s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1279.814571] env[69992]: DEBUG nova.compute.manager [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1279.822023] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.541s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1279.822023] env[69992]: DEBUG nova.objects.instance [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lazy-loading 'resources' on Instance uuid 5f98a2aa-eb7b-41d2-9e9f-14cee9445942 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1279.922146] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Releasing lock "refresh_cache-08869f38-9609-4f7f-9110-2f26fd1cb3f7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1279.922227] env[69992]: DEBUG nova.compute.manager [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Instance network_info: |[{"id": "bb64cf0b-3b8e-4225-ba71-1524625e60a7", "address": "fa:16:3e:e0:df:48", "network": {"id": "50514bed-dff8-45a5-83f3-ec0c1ece9611", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1240365716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f3a2959667e41f1b5868994454b21be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb64cf0b-3b", "ovs_interfaceid": "bb64cf0b-3b8e-4225-ba71-1524625e60a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1279.922792] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:df:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0954fad3-d24d-496c-83e6-a09d3cb556fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bb64cf0b-3b8e-4225-ba71-1524625e60a7', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1279.931857] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1279.932463] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1279.932752] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c38083c1-7222-4512-b91d-8f2e3e20a29a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.961028] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1279.961028] env[69992]: value = "task-2897728" [ 1279.961028] env[69992]: _type = "Task" [ 1279.961028] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.968561] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897728, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.996306] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1280.057235] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1280.098700] env[69992]: DEBUG nova.network.neutron [-] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1280.107140] env[69992]: DEBUG oslo_vmware.api [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897726, 'name': PowerOnVM_Task, 'duration_secs': 0.691885} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.109428] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1280.109428] env[69992]: INFO nova.compute.manager [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Took 9.11 seconds to spawn the instance on the hypervisor. [ 1280.109428] env[69992]: DEBUG nova.compute.manager [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1280.109428] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a17204d4-102d-46b6-b000-16dff7758c4a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.204170] env[69992]: DEBUG oslo_vmware.api [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897727, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.213873] env[69992]: DEBUG nova.compute.manager [req-6550fdc6-ae64-4d72-9aaa-8d01c6744e25 req-24079dd7-6a06-4fd7-8bad-78738d5aaed2 service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Received event network-changed-bb64cf0b-3b8e-4225-ba71-1524625e60a7 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1280.219188] env[69992]: DEBUG nova.compute.manager [req-6550fdc6-ae64-4d72-9aaa-8d01c6744e25 req-24079dd7-6a06-4fd7-8bad-78738d5aaed2 service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Refreshing instance network info cache due to event network-changed-bb64cf0b-3b8e-4225-ba71-1524625e60a7. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1280.219188] env[69992]: DEBUG oslo_concurrency.lockutils [req-6550fdc6-ae64-4d72-9aaa-8d01c6744e25 req-24079dd7-6a06-4fd7-8bad-78738d5aaed2 service nova] Acquiring lock "refresh_cache-08869f38-9609-4f7f-9110-2f26fd1cb3f7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.219188] env[69992]: DEBUG oslo_concurrency.lockutils [req-6550fdc6-ae64-4d72-9aaa-8d01c6744e25 req-24079dd7-6a06-4fd7-8bad-78738d5aaed2 service nova] Acquired lock "refresh_cache-08869f38-9609-4f7f-9110-2f26fd1cb3f7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1280.219188] env[69992]: DEBUG nova.network.neutron [req-6550fdc6-ae64-4d72-9aaa-8d01c6744e25 req-24079dd7-6a06-4fd7-8bad-78738d5aaed2 service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Refreshing network info cache for port bb64cf0b-3b8e-4225-ba71-1524625e60a7 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1280.330046] env[69992]: DEBUG nova.compute.utils [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1280.333825] env[69992]: DEBUG nova.compute.manager [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1280.333825] env[69992]: DEBUG nova.network.neutron [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1280.376512] env[69992]: DEBUG nova.policy [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '004f89548892435fb7a6bc0517d99150', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd678b222d1054d289ae3094662b32378', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1280.470605] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897728, 'name': CreateVM_Task, 'duration_secs': 0.355676} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.470780] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1280.472322] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.472322] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1280.472440] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1280.472649] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d68b13a-a324-4727-a7a5-ad8dd7573e9b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.480841] env[69992]: DEBUG oslo_vmware.api [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1280.480841] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]524fdb9b-ea04-37eb-0efb-f1038a10b6c3" [ 1280.480841] env[69992]: _type = 
"Task" [ 1280.480841] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.496059] env[69992]: DEBUG oslo_vmware.api [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524fdb9b-ea04-37eb-0efb-f1038a10b6c3, 'name': SearchDatastore_Task, 'duration_secs': 0.009504} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.496059] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1280.496059] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1280.496059] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.496059] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1280.496059] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1280.496059] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-67993437-479b-428c-a1fe-ef6c474510e6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.505469] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1280.505469] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1280.505777] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81d8f9fa-c4ee-4476-8bff-b6b359ad4ee2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.511887] env[69992]: DEBUG oslo_vmware.api [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1280.511887] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]521a243a-6d30-e3d1-bfd4-3c5d1c8b9dbc" [ 1280.511887] env[69992]: _type = "Task" [ 1280.511887] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.523702] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Acquiring lock "1cf5a6d2-8ec9-429a-9c31-eb3c699389d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1280.523939] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Lock "1cf5a6d2-8ec9-429a-9c31-eb3c699389d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1280.530645] env[69992]: DEBUG oslo_vmware.api [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521a243a-6d30-e3d1-bfd4-3c5d1c8b9dbc, 'name': SearchDatastore_Task, 'duration_secs': 0.008952} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.531433] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cce486b5-d6e8-4ea7-b0c4-028545341cc8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.543133] env[69992]: DEBUG oslo_vmware.api [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1280.543133] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d7c7bd-751e-30b4-0a6a-79d8e2731699" [ 1280.543133] env[69992]: _type = "Task" [ 1280.543133] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.559502] env[69992]: DEBUG oslo_vmware.api [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d7c7bd-751e-30b4-0a6a-79d8e2731699, 'name': SearchDatastore_Task, 'duration_secs': 0.008781} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.559801] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1280.560175] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 08869f38-9609-4f7f-9110-2f26fd1cb3f7/08869f38-9609-4f7f-9110-2f26fd1cb3f7.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1280.560513] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a29cdad9-9840-45fe-a910-e029ef458a8a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.571138] env[69992]: DEBUG oslo_vmware.api [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1280.571138] env[69992]: value = "task-2897729" [ 1280.571138] env[69992]: _type = "Task" [ 1280.571138] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.584284] env[69992]: DEBUG oslo_vmware.api [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897729, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.601640] env[69992]: INFO nova.compute.manager [-] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Took 1.65 seconds to deallocate network for instance. [ 1280.633559] env[69992]: INFO nova.compute.manager [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Took 50.38 seconds to build instance. [ 1280.707750] env[69992]: DEBUG oslo_vmware.api [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897727, 'name': CreateSnapshot_Task, 'duration_secs': 0.566071} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.710714] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Created Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1280.711762] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0f87d7-8cca-4c84-bbaa-4b5eb9418313 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.741311] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba75a8b-7a86-48af-9485-03695a235dbb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.749359] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-388a3b1f-b22a-478f-8177-8051299b2e9d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.787148] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e57a2a1-7126-4e8b-8a23-c0397ef49e56 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.800046] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1176789-3a3e-448f-b774-22c078f088c1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.814822] env[69992]: DEBUG nova.compute.provider_tree [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1280.832709] env[69992]: DEBUG nova.compute.manager [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1280.991392] env[69992]: DEBUG nova.network.neutron [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Successfully created port: eedc1d69-4b17-4629-b3a3-7adea2449215 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1281.026124] env[69992]: DEBUG nova.compute.manager [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1281.040161] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "dedba037-48a7-4083-925d-5f34e2a27362" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1281.040506] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "dedba037-48a7-4083-925d-5f34e2a27362" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1281.040800] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "dedba037-48a7-4083-925d-5f34e2a27362-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1281.041063] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "dedba037-48a7-4083-925d-5f34e2a27362-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1281.041298] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "dedba037-48a7-4083-925d-5f34e2a27362-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1281.044520] env[69992]: INFO nova.compute.manager [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Terminating instance [ 1281.084977] env[69992]: DEBUG oslo_vmware.api [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897729, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.454979} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.084977] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 08869f38-9609-4f7f-9110-2f26fd1cb3f7/08869f38-9609-4f7f-9110-2f26fd1cb3f7.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1281.085166] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1281.085407] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c17b4737-5af1-4096-b901-db24a498d75b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.092236] env[69992]: DEBUG oslo_vmware.api [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1281.092236] env[69992]: value = "task-2897730" [ 1281.092236] env[69992]: _type = "Task" [ 1281.092236] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.103179] env[69992]: DEBUG oslo_vmware.api [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897730, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.108224] env[69992]: DEBUG oslo_concurrency.lockutils [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1281.138833] env[69992]: DEBUG oslo_concurrency.lockutils [None req-53eb0324-25de-4ae4-ac86-4f7aea9cd4da tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "e95e47c2-d82e-4153-8d16-7b65d992e91a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.896s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1281.237573] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Creating linked-clone VM from snapshot {{(pid=69992) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1281.237900] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-17db2ba4-e029-4e19-b031-1b93ab7c63e0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.241949] env[69992]: DEBUG nova.network.neutron [req-6550fdc6-ae64-4d72-9aaa-8d01c6744e25 req-24079dd7-6a06-4fd7-8bad-78738d5aaed2 service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Updated VIF entry in instance network info cache for port bb64cf0b-3b8e-4225-ba71-1524625e60a7. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1281.242179] env[69992]: DEBUG nova.network.neutron [req-6550fdc6-ae64-4d72-9aaa-8d01c6744e25 req-24079dd7-6a06-4fd7-8bad-78738d5aaed2 service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Updating instance_info_cache with network_info: [{"id": "bb64cf0b-3b8e-4225-ba71-1524625e60a7", "address": "fa:16:3e:e0:df:48", "network": {"id": "50514bed-dff8-45a5-83f3-ec0c1ece9611", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1240365716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f3a2959667e41f1b5868994454b21be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb64cf0b-3b", "ovs_interfaceid": "bb64cf0b-3b8e-4225-ba71-1524625e60a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1281.249495] env[69992]: DEBUG oslo_vmware.api [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1281.249495] env[69992]: value = "task-2897731" [ 1281.249495] env[69992]: _type = "Task" [ 1281.249495] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.259475] env[69992]: DEBUG oslo_vmware.api [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897731, 'name': CloneVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.322442] env[69992]: DEBUG nova.scheduler.client.report [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1281.553148] env[69992]: DEBUG nova.compute.manager [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1281.553148] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1281.553148] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c425be2-3641-4f7d-916f-3e2723c9f323 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.556701] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1281.562095] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1281.562494] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2d6a3614-a35b-46b0-82b8-b490ab1331f0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.601726] env[69992]: DEBUG oslo_vmware.api [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897730, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065507} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.602394] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1281.603519] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c67f6007-8170-42fe-8e1e-eec64c1f1e50 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.627863] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 08869f38-9609-4f7f-9110-2f26fd1cb3f7/08869f38-9609-4f7f-9110-2f26fd1cb3f7.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1281.630146] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc7f61cf-dbcc-433c-abb1-188367780429 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.652822] env[69992]: DEBUG oslo_vmware.api [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1281.652822] env[69992]: value = "task-2897733" [ 1281.652822] env[69992]: _type = "Task" [ 1281.652822] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.659027] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1281.659404] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1281.659728] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Deleting the datastore file [datastore2] dedba037-48a7-4083-925d-5f34e2a27362 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1281.662393] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-42b52fff-706e-4708-b37a-8114a5c85400 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.666852] env[69992]: DEBUG oslo_vmware.api [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897733, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.670015] env[69992]: DEBUG oslo_vmware.api [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1281.670015] env[69992]: value = "task-2897734" [ 1281.670015] env[69992]: _type = "Task" [ 1281.670015] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.676812] env[69992]: DEBUG oslo_vmware.api [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897734, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.745590] env[69992]: DEBUG oslo_concurrency.lockutils [req-6550fdc6-ae64-4d72-9aaa-8d01c6744e25 req-24079dd7-6a06-4fd7-8bad-78738d5aaed2 service nova] Releasing lock "refresh_cache-08869f38-9609-4f7f-9110-2f26fd1cb3f7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1281.745965] env[69992]: DEBUG nova.compute.manager [req-6550fdc6-ae64-4d72-9aaa-8d01c6744e25 req-24079dd7-6a06-4fd7-8bad-78738d5aaed2 service nova] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Received event network-vif-deleted-24614f86-0f65-4b7b-b425-05b92f02312b {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1281.762561] env[69992]: DEBUG oslo_vmware.api [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897731, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.828179] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.009s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1281.831450] env[69992]: DEBUG oslo_concurrency.lockutils [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.073s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1281.831623] env[69992]: DEBUG nova.objects.instance [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lazy-loading 'resources' on Instance uuid c4bd5585-d917-4d92-9ce8-fa1950944f25 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1281.842637] env[69992]: DEBUG nova.compute.manager [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1281.849425] env[69992]: INFO nova.scheduler.client.report [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Deleted allocations for instance 5f98a2aa-eb7b-41d2-9e9f-14cee9445942 [ 1281.874656] env[69992]: DEBUG nova.virt.hardware [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1281.874963] env[69992]: DEBUG nova.virt.hardware [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1281.875188] env[69992]: DEBUG nova.virt.hardware [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1281.875421] env[69992]: DEBUG nova.virt.hardware [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1281.875613] env[69992]: DEBUG nova.virt.hardware [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1281.875820] env[69992]: DEBUG nova.virt.hardware [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1281.876184] env[69992]: DEBUG nova.virt.hardware [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1281.876397] env[69992]: DEBUG nova.virt.hardware [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 
tempest-ServerTagsTestJSON-1940887869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1281.876616] env[69992]: DEBUG nova.virt.hardware [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1281.876854] env[69992]: DEBUG nova.virt.hardware [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1281.877093] env[69992]: DEBUG nova.virt.hardware [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1281.878151] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff952c0-1788-4585-91b4-a67dd4ea1678 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.889071] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d8990b0-fbfd-4407-80ef-426be3e039bc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.164669] env[69992]: DEBUG oslo_vmware.api [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897733, 'name': ReconfigVM_Task, 'duration_secs': 0.332871} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.165121] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 08869f38-9609-4f7f-9110-2f26fd1cb3f7/08869f38-9609-4f7f-9110-2f26fd1cb3f7.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1282.165979] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b9763f71-4712-4421-b917-3124ec356720 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.174574] env[69992]: DEBUG oslo_vmware.api [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1282.174574] env[69992]: value = "task-2897735" [ 1282.174574] env[69992]: _type = "Task" [ 1282.174574] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.186316] env[69992]: DEBUG oslo_vmware.api [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897734, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166635} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.187086] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1282.188900] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1282.189256] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1282.189500] env[69992]: INFO nova.compute.manager [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Took 0.64 seconds to destroy the instance on the hypervisor. [ 1282.189793] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1282.197941] env[69992]: DEBUG nova.compute.manager [-] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1282.198121] env[69992]: DEBUG nova.network.neutron [-] [instance: dedba037-48a7-4083-925d-5f34e2a27362] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1282.199966] env[69992]: DEBUG oslo_vmware.api [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897735, 'name': Rename_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.264074] env[69992]: DEBUG oslo_vmware.api [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897731, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.325128] env[69992]: DEBUG nova.compute.manager [req-781024a2-6183-429c-b52d-f59ff7dcc7c7 req-d6fe19b1-e205-4077-831a-69514aa95a18 service nova] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Received event network-changed-e6b569b5-e098-47d5-80c8-8c95df681396 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1282.325128] env[69992]: DEBUG nova.compute.manager [req-781024a2-6183-429c-b52d-f59ff7dcc7c7 req-d6fe19b1-e205-4077-831a-69514aa95a18 service nova] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Refreshing instance network info cache due to event network-changed-e6b569b5-e098-47d5-80c8-8c95df681396. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1282.325128] env[69992]: DEBUG oslo_concurrency.lockutils [req-781024a2-6183-429c-b52d-f59ff7dcc7c7 req-d6fe19b1-e205-4077-831a-69514aa95a18 service nova] Acquiring lock "refresh_cache-e95e47c2-d82e-4153-8d16-7b65d992e91a" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.325128] env[69992]: DEBUG oslo_concurrency.lockutils [req-781024a2-6183-429c-b52d-f59ff7dcc7c7 req-d6fe19b1-e205-4077-831a-69514aa95a18 service nova] Acquired lock "refresh_cache-e95e47c2-d82e-4153-8d16-7b65d992e91a" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1282.325128] env[69992]: DEBUG nova.network.neutron [req-781024a2-6183-429c-b52d-f59ff7dcc7c7 req-d6fe19b1-e205-4077-831a-69514aa95a18 service nova] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Refreshing network info cache for port e6b569b5-e098-47d5-80c8-8c95df681396 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1282.337990] env[69992]: DEBUG nova.objects.instance [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lazy-loading 'numa_topology' on Instance uuid c4bd5585-d917-4d92-9ce8-fa1950944f25 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1282.358985] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e59ddeb-2a6e-43eb-9e3d-264efc162f35 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "5f98a2aa-eb7b-41d2-9e9f-14cee9445942" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.939s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1282.667190] env[69992]: DEBUG nova.compute.manager [req-428be59e-75df-4d95-a70a-5416e52e9a87 req-68b9e5cc-1f3e-4443-9ae2-f80764a818a0 service nova] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Received event network-vif-deleted-0cb0f0fd-a197-4688-a99f-231754ad8820 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1282.667403] env[69992]: INFO nova.compute.manager [req-428be59e-75df-4d95-a70a-5416e52e9a87 req-68b9e5cc-1f3e-4443-9ae2-f80764a818a0 service nova] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Neutron deleted interface 0cb0f0fd-a197-4688-a99f-231754ad8820; detaching it from the instance and deleting it from the info cache [ 1282.667577] env[69992]: DEBUG nova.network.neutron [req-428be59e-75df-4d95-a70a-5416e52e9a87 req-68b9e5cc-1f3e-4443-9ae2-f80764a818a0 service nova] [instance: 
dedba037-48a7-4083-925d-5f34e2a27362] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1282.685482] env[69992]: DEBUG oslo_vmware.api [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897735, 'name': Rename_Task, 'duration_secs': 0.144511} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.686357] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1282.686614] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0e48a8c0-26d7-4828-ac5d-8f4e6d283c04 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.697694] env[69992]: DEBUG oslo_vmware.api [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1282.697694] env[69992]: value = "task-2897736" [ 1282.697694] env[69992]: _type = "Task" [ 1282.697694] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.700435] env[69992]: DEBUG nova.network.neutron [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Successfully updated port: eedc1d69-4b17-4629-b3a3-7adea2449215 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1282.714142] env[69992]: DEBUG oslo_vmware.api [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897736, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.765159] env[69992]: DEBUG oslo_vmware.api [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897731, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.841192] env[69992]: DEBUG nova.objects.base [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1283.062461] env[69992]: DEBUG nova.network.neutron [req-781024a2-6183-429c-b52d-f59ff7dcc7c7 req-d6fe19b1-e205-4077-831a-69514aa95a18 service nova] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Updated VIF entry in instance network info cache for port e6b569b5-e098-47d5-80c8-8c95df681396. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1283.062856] env[69992]: DEBUG nova.network.neutron [req-781024a2-6183-429c-b52d-f59ff7dcc7c7 req-d6fe19b1-e205-4077-831a-69514aa95a18 service nova] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Updating instance_info_cache with network_info: [{"id": "e6b569b5-e098-47d5-80c8-8c95df681396", "address": "fa:16:3e:7f:9a:d9", "network": {"id": "7b76ab15-e15a-4e22-ba38-bffeba7c4ff6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1461551189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4da04b8933ad4d2ba4b1c193853f31b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6b569b5-e0", "ovs_interfaceid": "e6b569b5-e098-47d5-80c8-8c95df681396", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.074744] env[69992]: DEBUG nova.network.neutron [-] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.158042] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde9b78f-b42c-4ca5-84f8-6178aab51675 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.167719] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e5fda9-16e1-4cf6-aa48-fd3c9c267c44 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.170937] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-131ecfb1-7f81-4bcd-9f61-98c12d505644 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.200672] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ce29b1-b9f0-424f-8cb6-c0515f434972 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.205528] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Acquiring lock "refresh_cache-ae681491-c03e-486f-b763-0ebfa4dcd669" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1283.205696] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 
tempest-ServerTagsTestJSON-1940887869-project-member] Acquired lock "refresh_cache-ae681491-c03e-486f-b763-0ebfa4dcd669" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1283.205851] env[69992]: DEBUG nova.network.neutron [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1283.215132] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a2982f-da55-4e45-a53c-27b26c86e93d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.238875] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea375e20-b8a3-4bf2-8cec-73274de07d97 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.247907] env[69992]: DEBUG oslo_vmware.api [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897736, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.262479] env[69992]: DEBUG nova.compute.manager [req-428be59e-75df-4d95-a70a-5416e52e9a87 req-68b9e5cc-1f3e-4443-9ae2-f80764a818a0 service nova] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Detach interface failed, port_id=0cb0f0fd-a197-4688-a99f-231754ad8820, reason: Instance dedba037-48a7-4083-925d-5f34e2a27362 could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1283.275664] env[69992]: DEBUG nova.compute.provider_tree [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1283.282845] env[69992]: DEBUG oslo_vmware.api [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897731, 'name': CloneVM_Task, 'duration_secs': 1.976196} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.283416] env[69992]: INFO nova.virt.vmwareapi.vmops [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Created linked-clone VM from snapshot [ 1283.284432] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ba17ec-5946-40b7-b0cb-fa179a8808db {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.294163] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Uploading image c327a86e-681a-44c7-a5ee-2170fba24c86 {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1283.313424] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Destroying the VM {{(pid=69992) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1283.314095] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-910d0739-b496-48e6-ba71-1d3ec9695db6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.323652] env[69992]: DEBUG oslo_vmware.api [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1283.323652] env[69992]: value = "task-2897737" [ 1283.323652] env[69992]: _type = "Task" [ 1283.323652] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.331910] env[69992]: DEBUG oslo_vmware.api [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897737, 'name': Destroy_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.571204] env[69992]: DEBUG oslo_concurrency.lockutils [req-781024a2-6183-429c-b52d-f59ff7dcc7c7 req-d6fe19b1-e205-4077-831a-69514aa95a18 service nova] Releasing lock "refresh_cache-e95e47c2-d82e-4153-8d16-7b65d992e91a" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1283.578108] env[69992]: INFO nova.compute.manager [-] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Took 1.38 seconds to deallocate network for instance. [ 1283.714743] env[69992]: DEBUG oslo_vmware.api [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897736, 'name': PowerOnVM_Task, 'duration_secs': 0.650055} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.714999] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1283.715210] env[69992]: INFO nova.compute.manager [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Took 6.30 seconds to spawn the instance on the hypervisor. [ 1283.715379] env[69992]: DEBUG nova.compute.manager [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1283.716180] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5b72bd2-8932-40d7-8998-d3f7db72e00b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.774158] env[69992]: DEBUG nova.network.neutron [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1283.780366] env[69992]: DEBUG nova.scheduler.client.report [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1283.832052] env[69992]: DEBUG oslo_vmware.api [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897737, 'name': Destroy_Task} progress is 33%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.906763] env[69992]: DEBUG nova.network.neutron [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Updating instance_info_cache with network_info: [{"id": "eedc1d69-4b17-4629-b3a3-7adea2449215", "address": "fa:16:3e:cd:da:19", "network": {"id": "03b51e76-9d1a-4ca0-836c-2a43063b10db", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-129219416-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d678b222d1054d289ae3094662b32378", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeedc1d69-4b", "ovs_interfaceid": "eedc1d69-4b17-4629-b3a3-7adea2449215", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1284.084955] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1284.233897] env[69992]: INFO nova.compute.manager [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Took 50.92 seconds to build instance. 
[ 1284.287136] env[69992]: DEBUG oslo_concurrency.lockutils [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.455s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1284.290833] env[69992]: DEBUG oslo_concurrency.lockutils [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.204s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1284.290833] env[69992]: DEBUG nova.objects.instance [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lazy-loading 'resources' on Instance uuid 1b4da2ab-d026-45d8-8234-79ddd84d5cbb {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1284.334568] env[69992]: DEBUG oslo_vmware.api [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897737, 'name': Destroy_Task, 'duration_secs': 0.883257} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.334873] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Destroyed the VM [ 1284.335164] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Deleting Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1284.336072] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-820fa814-e034-40d9-91bf-b9f319115aca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.344270] env[69992]: DEBUG oslo_vmware.api [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1284.344270] env[69992]: value = "task-2897738" [ 1284.344270] env[69992]: _type = "Task" [ 1284.344270] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.354899] env[69992]: DEBUG oslo_vmware.api [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897738, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.409549] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Releasing lock "refresh_cache-ae681491-c03e-486f-b763-0ebfa4dcd669" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1284.410230] env[69992]: DEBUG nova.compute.manager [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Instance network_info: |[{"id": "eedc1d69-4b17-4629-b3a3-7adea2449215", "address": "fa:16:3e:cd:da:19", "network": {"id": "03b51e76-9d1a-4ca0-836c-2a43063b10db", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-129219416-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d678b222d1054d289ae3094662b32378", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeedc1d69-4b", "ovs_interfaceid": "eedc1d69-4b17-4629-b3a3-7adea2449215", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1284.410495] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:da:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2ede0e6-8d7a-4018-bb37-25bf388e9867', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eedc1d69-4b17-4629-b3a3-7adea2449215', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1284.418392] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Creating folder: Project (d678b222d1054d289ae3094662b32378). Parent ref: group-v581821. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1284.419202] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-952ff910-e2da-4efa-b584-0cf771bae173 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.433803] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Created folder: Project (d678b222d1054d289ae3094662b32378) in parent group-v581821. [ 1284.434047] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Creating folder: Instances. Parent ref: group-v582068. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1284.434311] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db527613-5132-4784-a2b2-45b2b6caba17 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.446914] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Created folder: Instances in parent group-v582068. [ 1284.447258] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1284.447838] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1284.448081] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1a27b07-2517-4867-a07d-7f962fe442b4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.469729] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1284.469729] env[69992]: value = "task-2897741" [ 1284.469729] env[69992]: _type = "Task" [ 1284.469729] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.480306] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897741, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.573548] env[69992]: DEBUG oslo_concurrency.lockutils [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "6ccc70f5-4857-4af3-99a1-f60ec35aebaf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1284.573690] env[69992]: DEBUG oslo_concurrency.lockutils [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "6ccc70f5-4857-4af3-99a1-f60ec35aebaf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1284.699449] env[69992]: DEBUG nova.compute.manager [req-e4a6d6b1-1c46-4317-9ea7-603ac09cce92 req-fbd10ead-7386-4cdc-abfc-3e0ea3d65d6c service nova] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Received event network-vif-plugged-eedc1d69-4b17-4629-b3a3-7adea2449215 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1284.699743] env[69992]: DEBUG oslo_concurrency.lockutils [req-e4a6d6b1-1c46-4317-9ea7-603ac09cce92 req-fbd10ead-7386-4cdc-abfc-3e0ea3d65d6c service nova] Acquiring lock "ae681491-c03e-486f-b763-0ebfa4dcd669-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1284.700071] env[69992]: DEBUG oslo_concurrency.lockutils [req-e4a6d6b1-1c46-4317-9ea7-603ac09cce92 req-fbd10ead-7386-4cdc-abfc-3e0ea3d65d6c service nova] Lock "ae681491-c03e-486f-b763-0ebfa4dcd669-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1284.700331] env[69992]: DEBUG oslo_concurrency.lockutils [req-e4a6d6b1-1c46-4317-9ea7-603ac09cce92 req-fbd10ead-7386-4cdc-abfc-3e0ea3d65d6c service nova] Lock "ae681491-c03e-486f-b763-0ebfa4dcd669-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1284.700582] env[69992]: DEBUG nova.compute.manager [req-e4a6d6b1-1c46-4317-9ea7-603ac09cce92 req-fbd10ead-7386-4cdc-abfc-3e0ea3d65d6c service nova] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] No waiting events found dispatching network-vif-plugged-eedc1d69-4b17-4629-b3a3-7adea2449215 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1284.700828] env[69992]: WARNING nova.compute.manager [req-e4a6d6b1-1c46-4317-9ea7-603ac09cce92 req-fbd10ead-7386-4cdc-abfc-3e0ea3d65d6c service nova] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Received unexpected event network-vif-plugged-eedc1d69-4b17-4629-b3a3-7adea2449215 for instance with vm_state building and task_state spawning. 
[ 1284.701087] env[69992]: DEBUG nova.compute.manager [req-e4a6d6b1-1c46-4317-9ea7-603ac09cce92 req-fbd10ead-7386-4cdc-abfc-3e0ea3d65d6c service nova] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Received event network-changed-eedc1d69-4b17-4629-b3a3-7adea2449215 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1284.701321] env[69992]: DEBUG nova.compute.manager [req-e4a6d6b1-1c46-4317-9ea7-603ac09cce92 req-fbd10ead-7386-4cdc-abfc-3e0ea3d65d6c service nova] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Refreshing instance network info cache due to event network-changed-eedc1d69-4b17-4629-b3a3-7adea2449215. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1284.701582] env[69992]: DEBUG oslo_concurrency.lockutils [req-e4a6d6b1-1c46-4317-9ea7-603ac09cce92 req-fbd10ead-7386-4cdc-abfc-3e0ea3d65d6c service nova] Acquiring lock "refresh_cache-ae681491-c03e-486f-b763-0ebfa4dcd669" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1284.701791] env[69992]: DEBUG oslo_concurrency.lockutils [req-e4a6d6b1-1c46-4317-9ea7-603ac09cce92 req-fbd10ead-7386-4cdc-abfc-3e0ea3d65d6c service nova] Acquired lock "refresh_cache-ae681491-c03e-486f-b763-0ebfa4dcd669" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1284.702022] env[69992]: DEBUG nova.network.neutron [req-e4a6d6b1-1c46-4317-9ea7-603ac09cce92 req-fbd10ead-7386-4cdc-abfc-3e0ea3d65d6c service nova] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Refreshing network info cache for port eedc1d69-4b17-4629-b3a3-7adea2449215 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1284.737512] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d2824820-c0b8-41ab-b43c-633b53df1194 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.430s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1284.800242] env[69992]: DEBUG oslo_concurrency.lockutils [None req-348ed63f-0edc-4899-8db9-27fd510cf91d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "c4bd5585-d917-4d92-9ce8-fa1950944f25" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 54.788s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1284.801309] env[69992]: DEBUG oslo_concurrency.lockutils [None req-13b98d8b-21d7-4648-b10d-ae3f769c110d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "c4bd5585-d917-4d92-9ce8-fa1950944f25" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 35.382s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1284.801550] env[69992]: DEBUG oslo_concurrency.lockutils [None req-13b98d8b-21d7-4648-b10d-ae3f769c110d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "c4bd5585-d917-4d92-9ce8-fa1950944f25-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1284.801831] env[69992]: DEBUG oslo_concurrency.lockutils [None req-13b98d8b-21d7-4648-b10d-ae3f769c110d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "c4bd5585-d917-4d92-9ce8-fa1950944f25-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1284.801921] env[69992]: DEBUG oslo_concurrency.lockutils [None req-13b98d8b-21d7-4648-b10d-ae3f769c110d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "c4bd5585-d917-4d92-9ce8-fa1950944f25-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1284.806647] env[69992]: INFO nova.compute.manager [None req-13b98d8b-21d7-4648-b10d-ae3f769c110d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Terminating instance [ 1284.855795] env[69992]: DEBUG oslo_vmware.api [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897738, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.979264] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897741, 'name': CreateVM_Task, 'duration_secs': 0.426573} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.981613] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1284.982470] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1284.982636] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1284.982978] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1284.983249] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5811a99f-59ad-450e-b6f8-9edbdcb30600 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.988420] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Waiting for the task: (returnval){ [ 1284.988420] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5230772f-aced-5d14-e35b-02f639f8c2ca" [ 1284.988420] env[69992]: _type = "Task" [ 1284.988420] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.998721] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5230772f-aced-5d14-e35b-02f639f8c2ca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.066801] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d106fec-b6e5-4569-9c7b-3924e5849862 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.074048] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66b2029-7a27-425e-b37c-354e72e67843 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.077467] env[69992]: DEBUG nova.compute.manager [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1285.111928] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c56bcb-76eb-4a44-81ec-91c77a92c815 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.120631] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45599020-6360-4d60-aa42-1d085cab0c09 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.134763] env[69992]: DEBUG nova.compute.provider_tree [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1285.313585] env[69992]: DEBUG nova.compute.manager [None req-13b98d8b-21d7-4648-b10d-ae3f769c110d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1285.313866] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-13b98d8b-21d7-4648-b10d-ae3f769c110d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1285.314144] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-47f876eb-6ac1-40f1-8479-48e2427a5185 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.323211] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a45d6391-2820-4816-87b1-443e8fc411c9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.358901] env[69992]: WARNING nova.virt.vmwareapi.vmops [None req-13b98d8b-21d7-4648-b10d-ae3f769c110d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c4bd5585-d917-4d92-9ce8-fa1950944f25 could not be found. [ 1285.359129] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-13b98d8b-21d7-4648-b10d-ae3f769c110d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1285.359353] env[69992]: INFO nova.compute.manager [None req-13b98d8b-21d7-4648-b10d-ae3f769c110d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1285.359555] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-13b98d8b-21d7-4648-b10d-ae3f769c110d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1285.360190] env[69992]: DEBUG nova.compute.manager [-] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1285.360290] env[69992]: DEBUG nova.network.neutron [-] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1285.364810] env[69992]: DEBUG oslo_vmware.api [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897738, 'name': RemoveSnapshot_Task, 'duration_secs': 0.871809} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.365324] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Deleted Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1285.443838] env[69992]: DEBUG nova.network.neutron [req-e4a6d6b1-1c46-4317-9ea7-603ac09cce92 req-fbd10ead-7386-4cdc-abfc-3e0ea3d65d6c service nova] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Updated VIF entry in instance network info cache for port eedc1d69-4b17-4629-b3a3-7adea2449215. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1285.444218] env[69992]: DEBUG nova.network.neutron [req-e4a6d6b1-1c46-4317-9ea7-603ac09cce92 req-fbd10ead-7386-4cdc-abfc-3e0ea3d65d6c service nova] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Updating instance_info_cache with network_info: [{"id": "eedc1d69-4b17-4629-b3a3-7adea2449215", "address": "fa:16:3e:cd:da:19", "network": {"id": "03b51e76-9d1a-4ca0-836c-2a43063b10db", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-129219416-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d678b222d1054d289ae3094662b32378", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeedc1d69-4b", "ovs_interfaceid": "eedc1d69-4b17-4629-b3a3-7adea2449215", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1285.499053] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5230772f-aced-5d14-e35b-02f639f8c2ca, 'name': SearchDatastore_Task, 'duration_secs': 0.010369} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.499053] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1285.499243] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1285.499334] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.499474] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1285.499651] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1285.499906] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-113b0646-1cf8-4e8b-b6cb-02191ff364cf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.513453] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1285.513643] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1285.514397] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d44f69f4-da1c-4ed4-a6d9-57c1287fd860 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.519401] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Waiting for the task: (returnval){ [ 1285.519401] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5290cf65-f5d6-1dff-128c-6fe09e517455" [ 1285.519401] env[69992]: _type = "Task" [ 1285.519401] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.527142] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5290cf65-f5d6-1dff-128c-6fe09e517455, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.604060] env[69992]: DEBUG oslo_concurrency.lockutils [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1285.638009] env[69992]: DEBUG nova.scheduler.client.report [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1285.872680] env[69992]: WARNING nova.compute.manager [None req-3437c8f7-7776-4afb-a1c3-235fea543d36 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Image not found during snapshot: nova.exception.ImageNotFound: Image c327a86e-681a-44c7-a5ee-2170fba24c86 could not be found. 
[ 1285.947247] env[69992]: DEBUG oslo_concurrency.lockutils [req-e4a6d6b1-1c46-4317-9ea7-603ac09cce92 req-fbd10ead-7386-4cdc-abfc-3e0ea3d65d6c service nova] Releasing lock "refresh_cache-ae681491-c03e-486f-b763-0ebfa4dcd669" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1285.947986] env[69992]: DEBUG nova.compute.manager [req-e4a6d6b1-1c46-4317-9ea7-603ac09cce92 req-fbd10ead-7386-4cdc-abfc-3e0ea3d65d6c service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Received event network-changed-bb64cf0b-3b8e-4225-ba71-1524625e60a7 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1285.947986] env[69992]: DEBUG nova.compute.manager [req-e4a6d6b1-1c46-4317-9ea7-603ac09cce92 req-fbd10ead-7386-4cdc-abfc-3e0ea3d65d6c service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Refreshing instance network info cache due to event network-changed-bb64cf0b-3b8e-4225-ba71-1524625e60a7. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1285.947986] env[69992]: DEBUG oslo_concurrency.lockutils [req-e4a6d6b1-1c46-4317-9ea7-603ac09cce92 req-fbd10ead-7386-4cdc-abfc-3e0ea3d65d6c service nova] Acquiring lock "refresh_cache-08869f38-9609-4f7f-9110-2f26fd1cb3f7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.948160] env[69992]: DEBUG oslo_concurrency.lockutils [req-e4a6d6b1-1c46-4317-9ea7-603ac09cce92 req-fbd10ead-7386-4cdc-abfc-3e0ea3d65d6c service nova] Acquired lock "refresh_cache-08869f38-9609-4f7f-9110-2f26fd1cb3f7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1285.948286] env[69992]: DEBUG nova.network.neutron [req-e4a6d6b1-1c46-4317-9ea7-603ac09cce92 req-fbd10ead-7386-4cdc-abfc-3e0ea3d65d6c service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Refreshing network info cache for port bb64cf0b-3b8e-4225-ba71-1524625e60a7 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1286.030102] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5290cf65-f5d6-1dff-128c-6fe09e517455, 'name': SearchDatastore_Task, 'duration_secs': 0.021032} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.030908] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb2eba68-954e-4288-a578-b3770bf5d207 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.037078] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Waiting for the task: (returnval){ [ 1286.037078] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52174f08-8df4-6b46-a261-21dbddaf5581" [ 1286.037078] env[69992]: _type = "Task" [ 1286.037078] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.043989] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52174f08-8df4-6b46-a261-21dbddaf5581, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.120678] env[69992]: DEBUG nova.network.neutron [-] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1286.143322] env[69992]: DEBUG oslo_concurrency.lockutils [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.854s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1286.146704] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.758s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1286.146704] env[69992]: DEBUG nova.objects.instance [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Lazy-loading 'resources' on Instance uuid 7fa33d98-20b7-4162-a354-24cfea17701f {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1286.163918] env[69992]: INFO nova.scheduler.client.report [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Deleted allocations for instance 1b4da2ab-d026-45d8-8234-79ddd84d5cbb [ 1286.554489] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52174f08-8df4-6b46-a261-21dbddaf5581, 'name': SearchDatastore_Task, 'duration_secs': 0.015744} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.554878] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1286.555216] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] ae681491-c03e-486f-b763-0ebfa4dcd669/ae681491-c03e-486f-b763-0ebfa4dcd669.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1286.556141] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b2a0ef62-cdc6-41a8-8db3-e9637878f595 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.562691] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Waiting for the task: (returnval){ [ 1286.562691] env[69992]: value = "task-2897742" [ 1286.562691] env[69992]: _type = "Task" [ 1286.562691] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.570296] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Task: {'id': task-2897742, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.622972] env[69992]: INFO nova.compute.manager [-] [instance: c4bd5585-d917-4d92-9ce8-fa1950944f25] Took 1.26 seconds to deallocate network for instance. [ 1286.671253] env[69992]: DEBUG oslo_concurrency.lockutils [None req-67b6f6ad-1a33-49ad-a321-e46e6bbb0d23 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "1b4da2ab-d026-45d8-8234-79ddd84d5cbb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.483s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1286.857217] env[69992]: DEBUG nova.network.neutron [req-e4a6d6b1-1c46-4317-9ea7-603ac09cce92 req-fbd10ead-7386-4cdc-abfc-3e0ea3d65d6c service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Updated VIF entry in instance network info cache for port bb64cf0b-3b8e-4225-ba71-1524625e60a7. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1286.857578] env[69992]: DEBUG nova.network.neutron [req-e4a6d6b1-1c46-4317-9ea7-603ac09cce92 req-fbd10ead-7386-4cdc-abfc-3e0ea3d65d6c service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Updating instance_info_cache with network_info: [{"id": "bb64cf0b-3b8e-4225-ba71-1524625e60a7", "address": "fa:16:3e:e0:df:48", "network": {"id": "50514bed-dff8-45a5-83f3-ec0c1ece9611", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1240365716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f3a2959667e41f1b5868994454b21be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb64cf0b-3b", "ovs_interfaceid": "bb64cf0b-3b8e-4225-ba71-1524625e60a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1286.859020] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "c6e4f19b-7264-4eea-a472-f64a68d4df22" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1286.859107] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "c6e4f19b-7264-4eea-a472-f64a68d4df22" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1286.859268] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "c6e4f19b-7264-4eea-a472-f64a68d4df22-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1286.859491] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "c6e4f19b-7264-4eea-a472-f64a68d4df22-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1286.859668] env[69992]: DEBUG oslo_concurrency.lockutils [None 
req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "c6e4f19b-7264-4eea-a472-f64a68d4df22-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1286.861646] env[69992]: INFO nova.compute.manager [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Terminating instance [ 1286.899275] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd51e2e1-0675-4ce7-8966-a5073aba9612 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.907182] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eafdec0c-aef3-441a-84c2-96f4f19276bc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.941048] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab0a723-a287-4eb9-9fb4-a7aff2962f4c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.949494] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-971d75ad-cb96-4078-b77b-ca6c611df60c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.962770] env[69992]: DEBUG nova.compute.provider_tree [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1287.073876] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Task: {'id': task-2897742, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.362576] env[69992]: DEBUG oslo_concurrency.lockutils [req-e4a6d6b1-1c46-4317-9ea7-603ac09cce92 req-fbd10ead-7386-4cdc-abfc-3e0ea3d65d6c service nova] Releasing lock "refresh_cache-08869f38-9609-4f7f-9110-2f26fd1cb3f7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1287.364538] env[69992]: DEBUG nova.compute.manager [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1287.364741] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1287.365635] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5564e0-8cae-4fdb-984b-a07c67c7fb7a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.373421] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1287.373665] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cc8f37dd-6895-4737-a256-c9b8d99129c8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.380083] env[69992]: DEBUG oslo_vmware.api [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1287.380083] env[69992]: value = "task-2897744" [ 1287.380083] env[69992]: _type = "Task" [ 1287.380083] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.387807] env[69992]: DEBUG oslo_vmware.api [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897744, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.469291] env[69992]: DEBUG nova.scheduler.client.report [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1287.573699] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Task: {'id': task-2897742, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.823346} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.573977] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] ae681491-c03e-486f-b763-0ebfa4dcd669/ae681491-c03e-486f-b763-0ebfa4dcd669.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1287.574197] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1287.574453] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2c28eed6-b286-4295-a185-ee64fda7c995 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.580968] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Waiting for the task: (returnval){ [ 1287.580968] env[69992]: value = "task-2897745" [ 1287.580968] env[69992]: _type = "Task" [ 1287.580968] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.588228] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Task: {'id': task-2897745, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.656530] env[69992]: DEBUG oslo_concurrency.lockutils [None req-13b98d8b-21d7-4648-b10d-ae3f769c110d tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "c4bd5585-d917-4d92-9ce8-fa1950944f25" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.855s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1287.892661] env[69992]: DEBUG oslo_vmware.api [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897744, 'name': PowerOffVM_Task, 'duration_secs': 0.287027} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.892927] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1287.893117] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1287.893368] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e6858a9-0e02-4676-8cb9-c05969543323 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.958934] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1287.959198] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1287.959391] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Deleting the datastore file [datastore2] c6e4f19b-7264-4eea-a472-f64a68d4df22 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1287.959664] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34966ab7-e20c-4eb1-a565-6e6c15ebcd65 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.965409] env[69992]: DEBUG oslo_vmware.api [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for the task: (returnval){ [ 1287.965409] env[69992]: value = "task-2897747" [ 1287.965409] env[69992]: _type = "Task" [ 1287.965409] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.972971] env[69992]: DEBUG oslo_vmware.api [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897747, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.974740] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.829s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1287.976808] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 28.872s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1287.999852] env[69992]: INFO nova.scheduler.client.report [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Deleted allocations for instance 7fa33d98-20b7-4162-a354-24cfea17701f [ 1288.091757] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Task: {'id': task-2897745, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066612} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.092038] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1288.092826] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c2f90a-c3c5-4b93-88bb-4e057463e122 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.115353] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] ae681491-c03e-486f-b763-0ebfa4dcd669/ae681491-c03e-486f-b763-0ebfa4dcd669.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1288.115906] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65dc1f8c-4381-43af-bd09-2d77ccacd143 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.134976] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Waiting for the task: (returnval){ [ 1288.134976] env[69992]: value = "task-2897748" [ 1288.134976] env[69992]: _type = "Task" [ 1288.134976] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.142858] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Task: {'id': task-2897748, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.476750] env[69992]: DEBUG oslo_vmware.api [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Task: {'id': task-2897747, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.431726} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.477025] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1288.477260] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1288.477440] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1288.477616] env[69992]: INFO nova.compute.manager [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1288.477915] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1288.478273] env[69992]: DEBUG nova.compute.manager [-] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1288.478273] env[69992]: DEBUG nova.network.neutron [-] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1288.508291] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d9aa8ac4-07ee-4205-b8b3-d2306620d6ea tempest-ServerAddressesNegativeTestJSON-417107254 tempest-ServerAddressesNegativeTestJSON-417107254-project-member] Lock "7fa33d98-20b7-4162-a354-24cfea17701f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.114s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1288.650237] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Task: {'id': task-2897748, 'name': ReconfigVM_Task, 'duration_secs': 0.264565} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.650591] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Reconfigured VM instance instance-00000059 to attach disk [datastore1] ae681491-c03e-486f-b763-0ebfa4dcd669/ae681491-c03e-486f-b763-0ebfa4dcd669.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1288.651318] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4ffac1a5-184e-4672-ad93-5f0798f41279 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.658255] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Waiting for the task: (returnval){ [ 1288.658255] env[69992]: value = "task-2897749" [ 1288.658255] env[69992]: _type = "Task" [ 1288.658255] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.667988] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Task: {'id': task-2897749, 'name': Rename_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.915582] env[69992]: DEBUG nova.compute.manager [req-4f3b11fa-2a69-48c9-9874-f4c71abbefcf req-d5e2343d-1c9f-43f1-aba2-c1ab80c7c9fa service nova] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Received event network-vif-deleted-087e09d7-8b3f-4d66-9008-d97120de9f30 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1288.915794] env[69992]: INFO nova.compute.manager [req-4f3b11fa-2a69-48c9-9874-f4c71abbefcf req-d5e2343d-1c9f-43f1-aba2-c1ab80c7c9fa service nova] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Neutron deleted interface 087e09d7-8b3f-4d66-9008-d97120de9f30; detaching it from the instance and deleting it from the info cache [ 1288.915972] env[69992]: DEBUG nova.network.neutron [req-4f3b11fa-2a69-48c9-9874-f4c71abbefcf req-d5e2343d-1c9f-43f1-aba2-c1ab80c7c9fa service nova] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1288.995223] env[69992]: INFO nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Updating resource usage from migration d6ea2928-95fb-421c-896a-12bb1a9338e8 [ 1289.017715] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance eec50935-f553-43c7-b67b-7289299745bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1289.017876] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance a8813822-f77b-4b73-a6dc-e0eab83b0402 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1289.018012] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance fcbe1142-72dc-4a02-af9b-e03a2031a247 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1289.018144] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1289.018288] env[69992]: WARNING nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 9591b360-414b-4aa9-94b2-5b9ccb9e7d39 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
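The resource-tracker entries around this point record how _remove_deleted_instances_allocations sorts placement allocations held against this node: actively managed instances and the active migration are kept, allocations for instances this host no longer manages are skipped with a warning, and allocations for instances that are scheduled but have yet to start are also skipped. A simplified, hypothetical re-statement of that decision, not Nova's actual implementation:

```python
# Hypothetical, simplified re-statement of the allocation-heal decision the
# resource tracker is logging here; it is not Nova's actual code.
def classify_allocation(consumer_id, managed_instances, active_migrations,
                        scheduled_not_started):
    """Tag one placement allocation held against this compute node."""
    if consumer_id in managed_instances:
        return 'keep'       # actively managed on this host
    if consumer_id in active_migrations:
        return 'keep'       # migration is active on this host
    if consumer_id in scheduled_not_started:
        return 'skip-heal'  # scheduled here, instance has yet to start
    # Allocation references this host but nothing here owns it:
    # "Skipping heal of allocation because we do not know what to do."
    return 'warn-and-skip'
```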
[ 1289.018482] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 57702674-4c96-4577-a93f-24ecffebb3a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1289.018655] env[69992]: WARNING nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance dedba037-48a7-4083-925d-5f34e2a27362 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1289.018800] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1289.019046] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance c6e4f19b-7264-4eea-a472-f64a68d4df22 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1289.019046] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance e95e47c2-d82e-4153-8d16-7b65d992e91a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1289.019156] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 08869f38-9609-4f7f-9110-2f26fd1cb3f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1289.019257] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance ae681491-c03e-486f-b763-0ebfa4dcd669 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1289.019378] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Migration d6ea2928-95fb-421c-896a-12bb1a9338e8 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1289.019493] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 31109fbd-ebc0-422d-a705-7d0e59d4bbb4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1289.061757] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "45a00234-7ebf-4835-bad3-30474bb27148" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1289.062013] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "45a00234-7ebf-4835-bad3-30474bb27148" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1289.167626] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Task: {'id': task-2897749, 'name': Rename_Task, 'duration_secs': 0.244223} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.167903] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1289.168201] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-89be0dad-346c-4aa2-bf6a-5d828a54c30b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.175590] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Waiting for the task: (returnval){ [ 1289.175590] env[69992]: value = "task-2897750" [ 1289.175590] env[69992]: _type = "Task" [ 1289.175590] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.183401] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Task: {'id': task-2897750, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.400039] env[69992]: DEBUG nova.network.neutron [-] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1289.418465] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5312d0cb-77ea-4e41-accd-d15ebdfbd726 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.428469] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd55cdf4-5b30-4413-a8ca-ef3303f170ff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.464569] env[69992]: DEBUG nova.compute.manager [req-4f3b11fa-2a69-48c9-9874-f4c71abbefcf req-d5e2343d-1c9f-43f1-aba2-c1ab80c7c9fa service nova] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Detach interface failed, port_id=087e09d7-8b3f-4d66-9008-d97120de9f30, reason: Instance c6e4f19b-7264-4eea-a472-f64a68d4df22 could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1289.524068] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1289.567088] env[69992]: DEBUG nova.compute.manager [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1289.690719] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Task: {'id': task-2897750, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.780585] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e3759558-7c7e-4505-bf4b-a30d0812eda3 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquiring lock "57702674-4c96-4577-a93f-24ecffebb3a7" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1289.780828] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e3759558-7c7e-4505-bf4b-a30d0812eda3 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "57702674-4c96-4577-a93f-24ecffebb3a7" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1289.903476] env[69992]: INFO nova.compute.manager [-] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Took 1.43 seconds to deallocate network for instance. [ 1290.027720] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance be28d7a8-6566-45aa-8b4c-08c7eb29864d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.090522] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1290.187499] env[69992]: DEBUG oslo_vmware.api [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Task: {'id': task-2897750, 'name': PowerOnVM_Task, 'duration_secs': 0.770918} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.187816] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1290.188046] env[69992]: INFO nova.compute.manager [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Took 8.35 seconds to spawn the instance on the hypervisor. 
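The spawn of instance ae681491-c03e-486f-b763-0ebfa4dcd669 finishes here after the CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task chain seen above. A condensed sketch of the first and last of those steps, reusing the hypothetical session helper from the earlier example; the managed-object references and vmdk paths are placeholders:

```python
# Condensed sketch of two of the spawn steps above (disk copy from the image
# cache, then power-on), using the hypothetical session helper from the
# earlier example. disk_mgr, dc_ref, vm_ref and the vmdk paths are placeholders.
def finish_spawn(session, disk_mgr, dc_ref, vm_ref, src_vmdk, dst_vmdk):
    # CopyVirtualDisk_Task: materialise the instance disk from the cached image
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=src_vmdk, sourceDatacenter=dc_ref,
                              destName=dst_vmdk, destDatacenter=dc_ref)
    session.wait_for_task(task)
    # PowerOnVM_Task: bring the instance up once its disk is attached
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)
```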
[ 1290.188236] env[69992]: DEBUG nova.compute.manager [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1290.189047] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c0074f5-d405-45bd-9e16-484cc102990b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.285050] env[69992]: DEBUG nova.compute.utils [None req-e3759558-7c7e-4505-bf4b-a30d0812eda3 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1290.414831] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1290.531953] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.706583] env[69992]: INFO nova.compute.manager [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Took 43.64 seconds to build instance. 
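The "Acquiring lock / acquired (waited Ns) / released (held Ns)" lines that bracket this build, including the 45.152s hold on the instance lock just below, come from oslo.concurrency's lockutils wrappers. A small illustrative sketch of the two usual forms, with made-up lock names and functions:

```python
# Illustrative use of oslo.concurrency lockutils, whose wrappers emit the
# lock acquire/waited/held DEBUG lines seen throughout this log.
# Lock names and functions below are made up.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage():
    # body runs while the in-process lock is held; the decorator's wrapper
    # logs the waited/held durations on acquire and release
    pass


def locked_build(instance_uuid):
    # the context-manager form logs Acquiring/Acquired/Releasing lock instead
    with lockutils.lock(instance_uuid):
        pass
```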
[ 1290.788605] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e3759558-7c7e-4505-bf4b-a30d0812eda3 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "57702674-4c96-4577-a93f-24ecffebb3a7" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1290.867837] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "a8813822-f77b-4b73-a6dc-e0eab83b0402" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1290.867837] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "a8813822-f77b-4b73-a6dc-e0eab83b0402" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1290.868231] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "a8813822-f77b-4b73-a6dc-e0eab83b0402-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1290.868283] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "a8813822-f77b-4b73-a6dc-e0eab83b0402-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1290.868421] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "a8813822-f77b-4b73-a6dc-e0eab83b0402-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1290.870763] env[69992]: INFO nova.compute.manager [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Terminating instance [ 1291.035444] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1291.208938] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0593b54-544d-46f8-bfa1-c56ab07569d6 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Lock "ae681491-c03e-486f-b763-0ebfa4dcd669" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.152s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1291.374591] env[69992]: DEBUG nova.compute.manager [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1291.374826] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1291.375707] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6e40fa-6f10-4d04-ae94-fed0b9024742 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.384638] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1291.384922] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5acf82bc-393d-4479-aa7d-76287b518533 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.391065] env[69992]: DEBUG oslo_vmware.api [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1291.391065] env[69992]: value = "task-2897751" [ 1291.391065] env[69992]: _type = "Task" [ 1291.391065] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.400194] env[69992]: DEBUG oslo_vmware.api [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897751, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.539128] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 6ccc70f5-4857-4af3-99a1-f60ec35aebaf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1291.873149] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e3759558-7c7e-4505-bf4b-a30d0812eda3 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquiring lock "57702674-4c96-4577-a93f-24ecffebb3a7" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1291.873149] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e3759558-7c7e-4505-bf4b-a30d0812eda3 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "57702674-4c96-4577-a93f-24ecffebb3a7" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1291.873149] env[69992]: INFO nova.compute.manager [None req-e3759558-7c7e-4505-bf4b-a30d0812eda3 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Attaching volume 6e1d27f6-c008-45f2-a5e6-b07f86c5443b to /dev/sdb [ 1291.901688] env[69992]: DEBUG oslo_vmware.api [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897751, 'name': PowerOffVM_Task, 'duration_secs': 0.251437} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.902094] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1291.902188] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1291.903137] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-748e5e97-8081-47ec-a9f8-6b800f570505 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.905151] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ff0701-8045-4600-b386-d06d91545633 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.913141] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ee8da4-3be2-420c-bc53-1e35ada00a6d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.926587] env[69992]: DEBUG nova.virt.block_device [None req-e3759558-7c7e-4505-bf4b-a30d0812eda3 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] 
Updating existing volume attachment record: d761abc1-c3cd-46dd-94f0-5a725ddf0ad6 {{(pid=69992) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1291.978771] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1291.979011] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1291.979333] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Deleting the datastore file [datastore1] a8813822-f77b-4b73-a6dc-e0eab83b0402 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1291.979482] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c40c3472-c91a-43d9-9444-1d28dba6be13 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.986905] env[69992]: DEBUG oslo_vmware.api [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1291.986905] env[69992]: value = "task-2897753" [ 1291.986905] env[69992]: _type = "Task" [ 1291.986905] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.995287] env[69992]: DEBUG oslo_vmware.api [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897753, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.042301] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 45a00234-7ebf-4835-bad3-30474bb27148 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1292.042598] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1292.042755] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2624MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1292.286295] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d60e764-2ea3-4a2c-8083-a3b41819302a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.295683] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e8372b9-81a2-4813-8ca8-06d0bb887e45 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.323857] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7986fcf1-f717-43c2-9473-fe24f0265009 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.331454] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e5c201d-3ed9-4020-8bc5-1062605fc95a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.346902] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1292.380897] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Acquiring lock "ae681491-c03e-486f-b763-0ebfa4dcd669" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.381199] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Lock "ae681491-c03e-486f-b763-0ebfa4dcd669" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1292.381411] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Acquiring lock "ae681491-c03e-486f-b763-0ebfa4dcd669-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.381594] env[69992]: DEBUG 
oslo_concurrency.lockutils [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Lock "ae681491-c03e-486f-b763-0ebfa4dcd669-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1292.381830] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Lock "ae681491-c03e-486f-b763-0ebfa4dcd669-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1292.384013] env[69992]: INFO nova.compute.manager [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Terminating instance [ 1292.496846] env[69992]: DEBUG oslo_vmware.api [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897753, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180333} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.497168] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1292.497383] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1292.497583] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1292.497790] env[69992]: INFO nova.compute.manager [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1292.498106] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1292.498403] env[69992]: DEBUG nova.compute.manager [-] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1292.498519] env[69992]: DEBUG nova.network.neutron [-] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1292.763799] env[69992]: DEBUG nova.compute.manager [req-7e5731c0-ba2c-4dc4-af9c-71b74810006f req-40f6089e-ed2e-4add-9f12-a165810fc42e service nova] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Received event network-vif-deleted-a225b5fb-43a1-478e-bb4d-0436f27e0475 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1292.764040] env[69992]: INFO nova.compute.manager [req-7e5731c0-ba2c-4dc4-af9c-71b74810006f req-40f6089e-ed2e-4add-9f12-a165810fc42e service nova] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Neutron deleted interface a225b5fb-43a1-478e-bb4d-0436f27e0475; detaching it from the instance and deleting it from the info cache [ 1292.764122] env[69992]: DEBUG nova.network.neutron [req-7e5731c0-ba2c-4dc4-af9c-71b74810006f req-40f6089e-ed2e-4add-9f12-a165810fc42e service nova] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1292.850826] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1292.887305] env[69992]: DEBUG nova.compute.manager [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1292.887594] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1292.888410] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714a8be2-e7f4-4105-8691-3ff86db8c605 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.897029] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1292.897029] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8da277c8-a753-41b3-874e-fc8d56b898d4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.903331] env[69992]: DEBUG oslo_vmware.api [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Waiting for the task: (returnval){ [ 1292.903331] env[69992]: value = "task-2897757" [ 1292.903331] env[69992]: _type = "Task" [ 1292.903331] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.910670] env[69992]: DEBUG oslo_vmware.api [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Task: {'id': task-2897757, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.247359] env[69992]: DEBUG nova.network.neutron [-] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.266359] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-572b6756-9dbb-4e07-aa3c-a8cdd9e30c34 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.275991] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23c07523-e9ac-4486-a3b5-81f537ee2f8e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.306248] env[69992]: DEBUG nova.compute.manager [req-7e5731c0-ba2c-4dc4-af9c-71b74810006f req-40f6089e-ed2e-4add-9f12-a165810fc42e service nova] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Detach interface failed, port_id=a225b5fb-43a1-478e-bb4d-0436f27e0475, reason: Instance a8813822-f77b-4b73-a6dc-e0eab83b0402 could not be found. 
{{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1293.355927] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69992) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1293.356114] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.379s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1293.356411] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 31.523s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1293.412778] env[69992]: DEBUG oslo_vmware.api [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Task: {'id': task-2897757, 'name': PowerOffVM_Task, 'duration_secs': 0.187629} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.413057] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1293.413236] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1293.413482] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ab825a2-e186-42b0-a404-ec6f73d6e571 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.473529] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1293.473785] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1293.473984] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Deleting the datastore file [datastore1] 
ae681491-c03e-486f-b763-0ebfa4dcd669 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1293.474267] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3cde422-5db1-49ff-87c7-ef68e5d30300 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.481788] env[69992]: DEBUG oslo_vmware.api [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Waiting for the task: (returnval){ [ 1293.481788] env[69992]: value = "task-2897759" [ 1293.481788] env[69992]: _type = "Task" [ 1293.481788] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.489789] env[69992]: DEBUG oslo_vmware.api [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Task: {'id': task-2897759, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.750763] env[69992]: INFO nova.compute.manager [-] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Took 1.25 seconds to deallocate network for instance. [ 1293.862220] env[69992]: INFO nova.compute.claims [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1293.993466] env[69992]: DEBUG oslo_vmware.api [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Task: {'id': task-2897759, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130755} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.993788] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1293.993883] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1293.994053] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1293.994237] env[69992]: INFO nova.compute.manager [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Took 1.11 seconds to destroy the instance on the hypervisor. 
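The PowerOffVM_Task and DeleteDatastoreFile_Task entries above all follow the same oslo.vmware pattern that the "Waiting for the task" / "progress is 0%" lines trace: invoke a vSphere API method that returns a Task managed-object reference, then block on the session's task poller until it completes or raises. The sketch below illustrates that two-step shape only; the vCenter host, credentials and VM moref are placeholders, not values from this log, and constructor argument names may differ slightly between oslo.vmware releases.

    # Hypothetical sketch of the invoke-then-wait_for_task pattern seen in the log.
    # Host, credentials and the VM managed-object id are placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    def power_off_vm(host, user, password, vm_moid):
        # Open a vSphere API session; retry count and poll interval are
        # illustrative settings, not the ones used by this nova-compute.
        session = vmware_api.VMwareAPISession(host, user, password,
                                              api_retry_count=10,
                                              task_poll_interval=0.5)
        try:
            # Build a managed object reference for the VM (moid + type).
            vm_ref = vim_util.get_moref(vm_moid, 'VirtualMachine')
            # Invoke the API call that returns a Task moref ...
            task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
            # ... then poll it to completion, as wait_for_task does in the
            # "Task: {'id': task-..., 'name': PowerOffVM_Task}" entries above.
            session.wait_for_task(task)
        finally:
            session.logout()

The same shape repeats later in this log for DeleteDatastoreFile_Task and for the ReconfigVM_Task calls during the volume attach; UnregisterVM is the exception, since it is not a *_Task method and returns nothing to wait on.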
[ 1293.994478] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1293.994674] env[69992]: DEBUG nova.compute.manager [-] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1293.994771] env[69992]: DEBUG nova.network.neutron [-] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1294.256416] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1294.370596] env[69992]: INFO nova.compute.resource_tracker [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Updating resource usage from migration d6ea2928-95fb-421c-896a-12bb1a9338e8 [ 1294.592456] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6492445-4267-4916-8131-2eda3888b18c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.600347] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d56c186-7ff5-4672-8525-c7fe82e815c9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.629193] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03bc1433-5c6e-4b8f-aabd-e3d9f1579fa9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.635808] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-344ed987-b3bf-4c5e-90a6-57a110fdb989 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.649478] env[69992]: DEBUG nova.compute.provider_tree [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1294.741029] env[69992]: DEBUG nova.network.neutron [-] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1294.787863] env[69992]: DEBUG nova.compute.manager [req-9f118513-c373-49ca-ad69-c40ec4c4f0e6 req-6575eabd-68dd-4251-9985-c5ffa3b2cffc service nova] [instance: 
ae681491-c03e-486f-b763-0ebfa4dcd669] Received event network-vif-deleted-eedc1d69-4b17-4629-b3a3-7adea2449215 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1295.153299] env[69992]: DEBUG nova.scheduler.client.report [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1295.243313] env[69992]: INFO nova.compute.manager [-] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Took 1.25 seconds to deallocate network for instance. [ 1295.657981] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.301s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1295.658235] env[69992]: INFO nova.compute.manager [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Migrating [ 1295.665094] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.718s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1295.666464] env[69992]: INFO nova.compute.claims [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1295.749766] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1296.178352] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "refresh_cache-31109fbd-ebc0-422d-a705-7d0e59d4bbb4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.178352] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] 
Acquired lock "refresh_cache-31109fbd-ebc0-422d-a705-7d0e59d4bbb4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1296.178659] env[69992]: DEBUG nova.network.neutron [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1296.473170] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3759558-7c7e-4505-bf4b-a30d0812eda3 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Volume attach. Driver type: vmdk {{(pid=69992) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1296.473417] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3759558-7c7e-4505-bf4b-a30d0812eda3 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582072', 'volume_id': '6e1d27f6-c008-45f2-a5e6-b07f86c5443b', 'name': 'volume-6e1d27f6-c008-45f2-a5e6-b07f86c5443b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '57702674-4c96-4577-a93f-24ecffebb3a7', 'attached_at': '', 'detached_at': '', 'volume_id': '6e1d27f6-c008-45f2-a5e6-b07f86c5443b', 'serial': '6e1d27f6-c008-45f2-a5e6-b07f86c5443b'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1296.474339] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3b1eafa-5493-4543-a86a-888d0b0e64ce {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.491111] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de06217-fda0-428f-9ef1-65c469cb5058 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.517252] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3759558-7c7e-4505-bf4b-a30d0812eda3 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] volume-6e1d27f6-c008-45f2-a5e6-b07f86c5443b/volume-6e1d27f6-c008-45f2-a5e6-b07f86c5443b.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1296.517644] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd396a0e-9e1d-4aed-9323-f8d79e4b35a1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.539613] env[69992]: DEBUG oslo_vmware.api [None req-e3759558-7c7e-4505-bf4b-a30d0812eda3 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1296.539613] env[69992]: value = "task-2897761" [ 1296.539613] env[69992]: _type = "Task" [ 1296.539613] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.551966] env[69992]: DEBUG oslo_vmware.api [None req-e3759558-7c7e-4505-bf4b-a30d0812eda3 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897761, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.885602] env[69992]: DEBUG nova.network.neutron [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Updating instance_info_cache with network_info: [{"id": "15455be6-d2df-46a9-bd15-7872eadb1ab6", "address": "fa:16:3e:7b:a7:d0", "network": {"id": "0655b1a1-e1ee-4efd-889c-0f72915310f5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1445509008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51b8195c4e7418cbdaa66aa5e5aff5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15455be6-d2", "ovs_interfaceid": "15455be6-d2df-46a9-bd15-7872eadb1ab6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1296.946159] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ce4bc8-db64-41ce-8b31-fa3b291000b1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.953108] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019c43be-6ebb-4ea0-aa59-49bf02f46711 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.982617] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9232789f-426d-4324-b586-0fa4184f1099 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.990537] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0353e3e4-d4fe-4b33-aa2e-275bada676e7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.003009] env[69992]: DEBUG nova.compute.provider_tree [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1297.048593] env[69992]: 
DEBUG oslo_vmware.api [None req-e3759558-7c7e-4505-bf4b-a30d0812eda3 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897761, 'name': ReconfigVM_Task, 'duration_secs': 0.347581} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.048855] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3759558-7c7e-4505-bf4b-a30d0812eda3 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Reconfigured VM instance instance-00000052 to attach disk [datastore2] volume-6e1d27f6-c008-45f2-a5e6-b07f86c5443b/volume-6e1d27f6-c008-45f2-a5e6-b07f86c5443b.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1297.053546] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0fd6bc8b-f588-4fc8-a42c-90c515315ce5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.067769] env[69992]: DEBUG oslo_vmware.api [None req-e3759558-7c7e-4505-bf4b-a30d0812eda3 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1297.067769] env[69992]: value = "task-2897762" [ 1297.067769] env[69992]: _type = "Task" [ 1297.067769] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.076603] env[69992]: DEBUG oslo_vmware.api [None req-e3759558-7c7e-4505-bf4b-a30d0812eda3 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897762, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.388745] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "refresh_cache-31109fbd-ebc0-422d-a705-7d0e59d4bbb4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1297.507503] env[69992]: DEBUG nova.scheduler.client.report [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1297.577595] env[69992]: DEBUG oslo_vmware.api [None req-e3759558-7c7e-4505-bf4b-a30d0812eda3 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897762, 'name': ReconfigVM_Task, 'duration_secs': 0.152321} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.577923] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3759558-7c7e-4505-bf4b-a30d0812eda3 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582072', 'volume_id': '6e1d27f6-c008-45f2-a5e6-b07f86c5443b', 'name': 'volume-6e1d27f6-c008-45f2-a5e6-b07f86c5443b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '57702674-4c96-4577-a93f-24ecffebb3a7', 'attached_at': '', 'detached_at': '', 'volume_id': '6e1d27f6-c008-45f2-a5e6-b07f86c5443b', 'serial': '6e1d27f6-c008-45f2-a5e6-b07f86c5443b'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1298.012581] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.347s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1298.013202] env[69992]: DEBUG nova.compute.manager [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1298.016039] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.195s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1298.017647] env[69992]: INFO nova.compute.claims [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1298.524558] env[69992]: DEBUG nova.compute.utils [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1298.528052] env[69992]: DEBUG nova.compute.manager [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1298.528052] env[69992]: DEBUG nova.network.neutron [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1298.574524] env[69992]: DEBUG nova.policy [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ad6673cee8e04b968c6afd54f9b51b74', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1471cdd6671b4e6ebc23b8fc2b120b63', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1298.611683] env[69992]: DEBUG nova.objects.instance [None req-e3759558-7c7e-4505-bf4b-a30d0812eda3 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lazy-loading 'flavor' on Instance uuid 57702674-4c96-4577-a93f-24ecffebb3a7 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1298.831599] env[69992]: DEBUG nova.network.neutron [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Successfully created port: 0042c1e4-d906-4261-a18e-ce232533cbdd {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1298.907782] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c4283e-e060-4b02-a978-b204ea49910a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.929271] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Updating instance '31109fbd-ebc0-422d-a705-7d0e59d4bbb4' progress to 0 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1299.028167] env[69992]: DEBUG nova.compute.manager [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1299.116961] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e3759558-7c7e-4505-bf4b-a30d0812eda3 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "57702674-4c96-4577-a93f-24ecffebb3a7" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.246s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1299.380457] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29305064-2e5d-4e67-817b-5a65b402dd1d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.389057] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1696f433-3f51-4f09-aab8-a06a1c9bfc9c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.422373] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b493a6-61a5-4911-8d48-df99068e6aee {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.435710] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e534bf73-3ecb-4e0b-af49-f188787b389d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.439162] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1299.439773] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f51a7a4c-eded-4d9a-b0f7-945f874a06b7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.453800] env[69992]: DEBUG nova.compute.provider_tree [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1299.457212] env[69992]: DEBUG oslo_vmware.api [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1299.457212] env[69992]: value = "task-2897763" [ 1299.457212] env[69992]: _type = "Task" [ 1299.457212] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.466913] env[69992]: DEBUG oslo_vmware.api [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897763, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.587793] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ad164afe-9613-4e56-8dae-6302544ca124 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquiring lock "57702674-4c96-4577-a93f-24ecffebb3a7" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1299.588096] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ad164afe-9613-4e56-8dae-6302544ca124 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "57702674-4c96-4577-a93f-24ecffebb3a7" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1299.959272] env[69992]: DEBUG nova.scheduler.client.report [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1299.972019] env[69992]: DEBUG oslo_vmware.api [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897763, 'name': PowerOffVM_Task, 'duration_secs': 0.215929} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.972298] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1299.972483] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Updating instance '31109fbd-ebc0-422d-a705-7d0e59d4bbb4' progress to 17 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1300.039535] env[69992]: DEBUG nova.compute.manager [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1300.065816] env[69992]: DEBUG nova.virt.hardware [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1300.066122] env[69992]: DEBUG nova.virt.hardware [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1300.066291] env[69992]: DEBUG nova.virt.hardware [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1300.066481] env[69992]: DEBUG nova.virt.hardware [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1300.066629] env[69992]: DEBUG nova.virt.hardware [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1300.066778] env[69992]: DEBUG nova.virt.hardware [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1300.066983] env[69992]: DEBUG nova.virt.hardware [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1300.067165] env[69992]: DEBUG nova.virt.hardware [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1300.067336] env[69992]: DEBUG 
nova.virt.hardware [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1300.067502] env[69992]: DEBUG nova.virt.hardware [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1300.067676] env[69992]: DEBUG nova.virt.hardware [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1300.068589] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a12c58b-8e73-48b6-a7d6-73844544a6b6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.077319] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3c9753-418e-4faf-a603-2c6e39d21c84 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.091753] env[69992]: INFO nova.compute.manager [None req-ad164afe-9613-4e56-8dae-6302544ca124 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Detaching volume 6e1d27f6-c008-45f2-a5e6-b07f86c5443b [ 1300.131051] env[69992]: INFO nova.virt.block_device [None req-ad164afe-9613-4e56-8dae-6302544ca124 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Attempting to driver detach volume 6e1d27f6-c008-45f2-a5e6-b07f86c5443b from mountpoint /dev/sdb [ 1300.131310] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad164afe-9613-4e56-8dae-6302544ca124 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Volume detach. 
Driver type: vmdk {{(pid=69992) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1300.131516] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad164afe-9613-4e56-8dae-6302544ca124 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582072', 'volume_id': '6e1d27f6-c008-45f2-a5e6-b07f86c5443b', 'name': 'volume-6e1d27f6-c008-45f2-a5e6-b07f86c5443b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '57702674-4c96-4577-a93f-24ecffebb3a7', 'attached_at': '', 'detached_at': '', 'volume_id': '6e1d27f6-c008-45f2-a5e6-b07f86c5443b', 'serial': '6e1d27f6-c008-45f2-a5e6-b07f86c5443b'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1300.132558] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f22bd22-27a3-45b8-aa6b-60b38298f2fc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.154875] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212f9103-6dfb-47b1-8168-28905bd3e25e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.162762] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c7a5eb-5ad8-4306-8766-43eb557c5a78 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.182719] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59ab777a-d3c0-480c-93fc-43e1a46b918f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.198912] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad164afe-9613-4e56-8dae-6302544ca124 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] The volume has not been displaced from its original location: [datastore2] volume-6e1d27f6-c008-45f2-a5e6-b07f86c5443b/volume-6e1d27f6-c008-45f2-a5e6-b07f86c5443b.vmdk. No consolidation needed. 
{{(pid=69992) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1300.204268] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad164afe-9613-4e56-8dae-6302544ca124 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Reconfiguring VM instance instance-00000052 to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1300.204591] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71c7993a-8ed6-45c2-8ca4-f681813b7243 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.223098] env[69992]: DEBUG oslo_vmware.api [None req-ad164afe-9613-4e56-8dae-6302544ca124 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1300.223098] env[69992]: value = "task-2897764" [ 1300.223098] env[69992]: _type = "Task" [ 1300.223098] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.228271] env[69992]: DEBUG nova.compute.manager [req-f2dd7efd-f84e-4723-a393-6093bfd0f2cb req-64327e8b-f155-4189-9b7c-d1bab8448ad4 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Received event network-vif-plugged-0042c1e4-d906-4261-a18e-ce232533cbdd {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1300.228536] env[69992]: DEBUG oslo_concurrency.lockutils [req-f2dd7efd-f84e-4723-a393-6093bfd0f2cb req-64327e8b-f155-4189-9b7c-d1bab8448ad4 service nova] Acquiring lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1300.228792] env[69992]: DEBUG oslo_concurrency.lockutils [req-f2dd7efd-f84e-4723-a393-6093bfd0f2cb req-64327e8b-f155-4189-9b7c-d1bab8448ad4 service nova] Lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1300.229022] env[69992]: DEBUG oslo_concurrency.lockutils [req-f2dd7efd-f84e-4723-a393-6093bfd0f2cb req-64327e8b-f155-4189-9b7c-d1bab8448ad4 service nova] Lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1300.229171] env[69992]: DEBUG nova.compute.manager [req-f2dd7efd-f84e-4723-a393-6093bfd0f2cb req-64327e8b-f155-4189-9b7c-d1bab8448ad4 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] No waiting events found dispatching network-vif-plugged-0042c1e4-d906-4261-a18e-ce232533cbdd {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1300.229338] env[69992]: WARNING nova.compute.manager [req-f2dd7efd-f84e-4723-a393-6093bfd0f2cb req-64327e8b-f155-4189-9b7c-d1bab8448ad4 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Received unexpected event network-vif-plugged-0042c1e4-d906-4261-a18e-ce232533cbdd for instance with vm_state building and task_state spawning. 
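The ReconfigVM_Task exchange above ("Waiting for the task ... to complete", followed by periodic "progress is N%" polls and a final "completed successfully") is the generic vCenter task pattern repeated throughout this log: the driver submits a task, then the API layer polls its state until it reaches success or error. The sketch below illustrates that poll loop only; it is not the oslo_vmware implementation, and get_task_info, the attributes on its return value, and POLL_INTERVAL are hypothetical stand-ins for the real vSphere SDK property queries.

import time

POLL_INTERVAL = 0.5  # assumed delay between polls, in seconds

def wait_for_task(get_task_info, task_ref):
    """Poll a vCenter-style task until it finishes.

    get_task_info is a hypothetical callable returning an object with
    state ('queued', 'running', 'success' or 'error'), progress (percent)
    and error attributes -- a stand-in for the property query the real
    API layer performs on each poll.
    """
    while True:
        info = get_task_info(task_ref)
        if info.state == 'success':
            return info                     # e.g. ReconfigVM_Task completed successfully
        if info.state == 'error':
            raise RuntimeError(info.error)  # surface the task fault to the caller
        # still queued or running: report progress (cf. "progress is 6%") and retry
        print(f"Task {task_ref}: progress is {info.progress}%")
        time.sleep(POLL_INTERVAL)

The same loop shape applies to the CreateVM_Task and SearchDatastore_Task waits that appear later in this section; only the task reference and the reported operation name differ.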
[ 1300.234637] env[69992]: DEBUG oslo_vmware.api [None req-ad164afe-9613-4e56-8dae-6302544ca124 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897764, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.321350] env[69992]: DEBUG nova.network.neutron [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Successfully updated port: 0042c1e4-d906-4261-a18e-ce232533cbdd {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1300.467596] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.452s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1300.468140] env[69992]: DEBUG nova.compute.manager [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1300.470902] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 20.478s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1300.471134] env[69992]: DEBUG nova.objects.instance [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69992) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1300.478385] env[69992]: DEBUG nova.virt.hardware [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1300.478649] env[69992]: DEBUG nova.virt.hardware [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 
tempest-ServerDiskConfigTestJSON-649865869-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1300.478860] env[69992]: DEBUG nova.virt.hardware [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1300.479091] env[69992]: DEBUG nova.virt.hardware [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1300.479248] env[69992]: DEBUG nova.virt.hardware [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1300.479399] env[69992]: DEBUG nova.virt.hardware [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1300.479605] env[69992]: DEBUG nova.virt.hardware [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1300.479766] env[69992]: DEBUG nova.virt.hardware [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1300.479939] env[69992]: DEBUG nova.virt.hardware [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1300.481261] env[69992]: DEBUG nova.virt.hardware [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1300.481261] env[69992]: DEBUG nova.virt.hardware [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1300.485628] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-007d0ee0-d786-4e21-83aa-d8d037e98b3c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.502623] env[69992]: DEBUG oslo_vmware.api 
[None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1300.502623] env[69992]: value = "task-2897765" [ 1300.502623] env[69992]: _type = "Task" [ 1300.502623] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.512312] env[69992]: DEBUG oslo_vmware.api [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897765, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.732863] env[69992]: DEBUG oslo_vmware.api [None req-ad164afe-9613-4e56-8dae-6302544ca124 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897764, 'name': ReconfigVM_Task, 'duration_secs': 0.232997} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.733208] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad164afe-9613-4e56-8dae-6302544ca124 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Reconfigured VM instance instance-00000052 to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1300.737874] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ba0ba41-7cfe-44fa-9694-faabbc9f818a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.753076] env[69992]: DEBUG oslo_vmware.api [None req-ad164afe-9613-4e56-8dae-6302544ca124 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1300.753076] env[69992]: value = "task-2897766" [ 1300.753076] env[69992]: _type = "Task" [ 1300.753076] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.761182] env[69992]: DEBUG oslo_vmware.api [None req-ad164afe-9613-4e56-8dae-6302544ca124 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897766, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.824949] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "refresh_cache-25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.824949] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquired lock "refresh_cache-25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1300.824949] env[69992]: DEBUG nova.network.neutron [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1300.975694] env[69992]: DEBUG nova.compute.utils [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1300.980443] env[69992]: DEBUG nova.compute.manager [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1300.980677] env[69992]: DEBUG nova.network.neutron [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1301.012680] env[69992]: DEBUG oslo_vmware.api [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897765, 'name': ReconfigVM_Task, 'duration_secs': 0.163697} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.013044] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Updating instance '31109fbd-ebc0-422d-a705-7d0e59d4bbb4' progress to 33 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1301.023188] env[69992]: DEBUG nova.policy [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9776fce78af445a7bdc3a1547b10a427', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8cbfc3fa19c94905a9f80c997f594ad2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1301.263171] env[69992]: DEBUG oslo_vmware.api [None req-ad164afe-9613-4e56-8dae-6302544ca124 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897766, 'name': ReconfigVM_Task, 'duration_secs': 0.150953} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.263495] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad164afe-9613-4e56-8dae-6302544ca124 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582072', 'volume_id': '6e1d27f6-c008-45f2-a5e6-b07f86c5443b', 'name': 'volume-6e1d27f6-c008-45f2-a5e6-b07f86c5443b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '57702674-4c96-4577-a93f-24ecffebb3a7', 'attached_at': '', 'detached_at': '', 'volume_id': '6e1d27f6-c008-45f2-a5e6-b07f86c5443b', 'serial': '6e1d27f6-c008-45f2-a5e6-b07f86c5443b'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1301.479030] env[69992]: DEBUG nova.compute.manager [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1301.485235] env[69992]: DEBUG oslo_concurrency.lockutils [None req-45b1180e-2f79-4934-867a-792026a5b5e0 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1301.485235] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.428s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1301.486491] env[69992]: INFO nova.compute.claims [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1301.492839] env[69992]: DEBUG nova.network.neutron [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1301.515905] env[69992]: DEBUG nova.network.neutron [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Successfully created port: 85e3736f-8bdc-4d92-b8c4-fef12e1a3bf4 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1301.520891] env[69992]: DEBUG nova.virt.hardware [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1301.521143] env[69992]: DEBUG nova.virt.hardware [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1301.521303] env[69992]: DEBUG nova.virt.hardware [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 1301.521506] env[69992]: DEBUG nova.virt.hardware [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1301.521697] env[69992]: DEBUG nova.virt.hardware [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1301.521894] env[69992]: DEBUG nova.virt.hardware [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1301.522133] env[69992]: DEBUG nova.virt.hardware [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1301.522314] env[69992]: DEBUG nova.virt.hardware [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1301.522505] env[69992]: DEBUG nova.virt.hardware [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1301.522674] env[69992]: DEBUG nova.virt.hardware [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1301.522887] env[69992]: DEBUG nova.virt.hardware [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1301.528522] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Reconfiguring VM instance instance-00000053 to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1301.529135] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34359416-0e40-43d4-be2e-278f15a32e2e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.552665] env[69992]: DEBUG oslo_vmware.api [None 
req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1301.552665] env[69992]: value = "task-2897767" [ 1301.552665] env[69992]: _type = "Task" [ 1301.552665] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.561146] env[69992]: DEBUG oslo_vmware.api [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897767, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.700847] env[69992]: DEBUG nova.network.neutron [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Updating instance_info_cache with network_info: [{"id": "0042c1e4-d906-4261-a18e-ce232533cbdd", "address": "fa:16:3e:44:45:52", "network": {"id": "58824cf0-bce0-4f1b-9942-dd68624dd3ff", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1287894269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1471cdd6671b4e6ebc23b8fc2b120b63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6fab536-1e48-4d07-992a-076f0e6d089c", "external-id": "nsx-vlan-transportzone-61", "segmentation_id": 61, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0042c1e4-d9", "ovs_interfaceid": "0042c1e4-d906-4261-a18e-ce232533cbdd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1301.808820] env[69992]: DEBUG nova.objects.instance [None req-ad164afe-9613-4e56-8dae-6302544ca124 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lazy-loading 'flavor' on Instance uuid 57702674-4c96-4577-a93f-24ecffebb3a7 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1302.062185] env[69992]: DEBUG oslo_vmware.api [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897767, 'name': ReconfigVM_Task, 'duration_secs': 0.157783} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.062428] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Reconfigured VM instance instance-00000053 to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1302.063201] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc12ed2-7a72-48ef-94a8-19ca7bf45cfb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.084487] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 31109fbd-ebc0-422d-a705-7d0e59d4bbb4/31109fbd-ebc0-422d-a705-7d0e59d4bbb4.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1302.084969] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c471f3a2-42cd-4ea4-b30f-a701d8b41ab7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.102304] env[69992]: DEBUG oslo_vmware.api [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1302.102304] env[69992]: value = "task-2897768" [ 1302.102304] env[69992]: _type = "Task" [ 1302.102304] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.109849] env[69992]: DEBUG oslo_vmware.api [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897768, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.203531] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Releasing lock "refresh_cache-25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1302.204038] env[69992]: DEBUG nova.compute.manager [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Instance network_info: |[{"id": "0042c1e4-d906-4261-a18e-ce232533cbdd", "address": "fa:16:3e:44:45:52", "network": {"id": "58824cf0-bce0-4f1b-9942-dd68624dd3ff", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1287894269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1471cdd6671b4e6ebc23b8fc2b120b63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6fab536-1e48-4d07-992a-076f0e6d089c", "external-id": "nsx-vlan-transportzone-61", "segmentation_id": 61, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0042c1e4-d9", "ovs_interfaceid": "0042c1e4-d906-4261-a18e-ce232533cbdd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1302.204581] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:45:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd6fab536-1e48-4d07-992a-076f0e6d089c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0042c1e4-d906-4261-a18e-ce232533cbdd', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1302.212802] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Creating folder: Project (1471cdd6671b4e6ebc23b8fc2b120b63). Parent ref: group-v581821. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1302.213031] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-048051a4-08ed-4558-bda1-77fafc403a1a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.224832] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Created folder: Project (1471cdd6671b4e6ebc23b8fc2b120b63) in parent group-v581821. [ 1302.225038] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Creating folder: Instances. Parent ref: group-v582073. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1302.225284] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-032d5bcb-92df-45dd-a0a8-cead53b272ec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.235016] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Created folder: Instances in parent group-v582073. [ 1302.235269] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1302.235464] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1302.235672] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a7246b1-c002-467d-8d45-1a518e48db44 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.253775] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1302.253775] env[69992]: value = "task-2897771" [ 1302.253775] env[69992]: _type = "Task" [ 1302.253775] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.261351] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897771, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.263483] env[69992]: DEBUG nova.compute.manager [req-3e7bd7d9-ce79-42bb-ba6f-70dfefa20c6e req-2002433b-0abb-4dac-a17a-3ec5c419d470 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Received event network-changed-0042c1e4-d906-4261-a18e-ce232533cbdd {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1302.263694] env[69992]: DEBUG nova.compute.manager [req-3e7bd7d9-ce79-42bb-ba6f-70dfefa20c6e req-2002433b-0abb-4dac-a17a-3ec5c419d470 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Refreshing instance network info cache due to event network-changed-0042c1e4-d906-4261-a18e-ce232533cbdd. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1302.263955] env[69992]: DEBUG oslo_concurrency.lockutils [req-3e7bd7d9-ce79-42bb-ba6f-70dfefa20c6e req-2002433b-0abb-4dac-a17a-3ec5c419d470 service nova] Acquiring lock "refresh_cache-25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1302.264031] env[69992]: DEBUG oslo_concurrency.lockutils [req-3e7bd7d9-ce79-42bb-ba6f-70dfefa20c6e req-2002433b-0abb-4dac-a17a-3ec5c419d470 service nova] Acquired lock "refresh_cache-25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1302.264228] env[69992]: DEBUG nova.network.neutron [req-3e7bd7d9-ce79-42bb-ba6f-70dfefa20c6e req-2002433b-0abb-4dac-a17a-3ec5c419d470 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Refreshing network info cache for port 0042c1e4-d906-4261-a18e-ce232533cbdd {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1302.489467] env[69992]: DEBUG nova.compute.manager [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1302.517540] env[69992]: DEBUG nova.virt.hardware [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1302.517922] env[69992]: DEBUG nova.virt.hardware [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1302.518205] env[69992]: DEBUG nova.virt.hardware [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1302.518505] env[69992]: DEBUG nova.virt.hardware [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1302.518821] env[69992]: DEBUG nova.virt.hardware [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1302.519041] env[69992]: DEBUG nova.virt.hardware [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1302.519386] env[69992]: DEBUG nova.virt.hardware [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1302.519672] env[69992]: DEBUG nova.virt.hardware [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 
tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1302.519956] env[69992]: DEBUG nova.virt.hardware [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1302.520238] env[69992]: DEBUG nova.virt.hardware [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1302.520542] env[69992]: DEBUG nova.virt.hardware [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1302.521991] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa3aeae-9c35-4ca0-acfe-490a9e8df724 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.535286] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa06e6c-b892-4daf-a756-ead52ad7ef0d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.611402] env[69992]: DEBUG oslo_vmware.api [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897768, 'name': ReconfigVM_Task, 'duration_secs': 0.260777} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.613820] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 31109fbd-ebc0-422d-a705-7d0e59d4bbb4/31109fbd-ebc0-422d-a705-7d0e59d4bbb4.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1302.614111] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Updating instance '31109fbd-ebc0-422d-a705-7d0e59d4bbb4' progress to 50 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1302.765586] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897771, 'name': CreateVM_Task, 'duration_secs': 0.308009} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.767534] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1302.768171] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1302.768340] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1302.768726] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1302.769184] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-289e687d-0ec9-4ecd-bc06-1571a2006554 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.773503] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1302.773503] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a08efb-e4b4-be1f-bfad-83c336612b22" [ 1302.773503] env[69992]: _type = "Task" [ 1302.773503] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.784852] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a08efb-e4b4-be1f-bfad-83c336612b22, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.817998] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ad164afe-9613-4e56-8dae-6302544ca124 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "57702674-4c96-4577-a93f-24ecffebb3a7" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.230s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1302.865509] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6bde191-da6c-49d7-a6da-b4b24e8aa45d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.875070] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee4e34e1-281b-4994-b354-ba14465ba403 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.907734] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-654cd236-f301-45ca-a00b-994406c80149 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.915426] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7300ce-cf0b-468c-a4ad-d94cad5de0d2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.933998] env[69992]: DEBUG nova.compute.provider_tree [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1303.066460] env[69992]: DEBUG nova.network.neutron [req-3e7bd7d9-ce79-42bb-ba6f-70dfefa20c6e req-2002433b-0abb-4dac-a17a-3ec5c419d470 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Updated VIF entry in instance network info cache for port 0042c1e4-d906-4261-a18e-ce232533cbdd. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1303.066752] env[69992]: DEBUG nova.network.neutron [req-3e7bd7d9-ce79-42bb-ba6f-70dfefa20c6e req-2002433b-0abb-4dac-a17a-3ec5c419d470 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Updating instance_info_cache with network_info: [{"id": "0042c1e4-d906-4261-a18e-ce232533cbdd", "address": "fa:16:3e:44:45:52", "network": {"id": "58824cf0-bce0-4f1b-9942-dd68624dd3ff", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1287894269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1471cdd6671b4e6ebc23b8fc2b120b63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6fab536-1e48-4d07-992a-076f0e6d089c", "external-id": "nsx-vlan-transportzone-61", "segmentation_id": 61, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0042c1e4-d9", "ovs_interfaceid": "0042c1e4-d906-4261-a18e-ce232533cbdd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1303.120959] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5e1768-6a4b-4366-8210-61a111bb9f2e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.140710] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e4cfeed-6b77-4074-903c-c42380f2ae0b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.158078] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Updating instance '31109fbd-ebc0-422d-a705-7d0e59d4bbb4' progress to 67 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1303.185440] env[69992]: DEBUG nova.network.neutron [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Successfully updated port: 85e3736f-8bdc-4d92-b8c4-fef12e1a3bf4 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1303.287771] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a08efb-e4b4-be1f-bfad-83c336612b22, 'name': SearchDatastore_Task, 'duration_secs': 0.010823} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.288070] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1303.288304] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1303.288577] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.288684] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1303.288862] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1303.289118] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7217b7ed-1d3c-4e35-a47e-2e9f35fab655 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.297215] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1303.297389] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1303.298041] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1146fca8-0a74-4e5d-be72-036997a9d1fd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.302570] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1303.302570] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5283205d-9bd5-ef03-0d85-ad5017da9bb1" [ 1303.302570] env[69992]: _type = "Task" [ 1303.302570] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.309630] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5283205d-9bd5-ef03-0d85-ad5017da9bb1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.438029] env[69992]: DEBUG nova.scheduler.client.report [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1303.541606] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquiring lock "48558980-2800-4f5b-80ce-d59552445c3f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1303.541874] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "48558980-2800-4f5b-80ce-d59552445c3f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1303.569009] env[69992]: DEBUG oslo_concurrency.lockutils [req-3e7bd7d9-ce79-42bb-ba6f-70dfefa20c6e req-2002433b-0abb-4dac-a17a-3ec5c419d470 service nova] Releasing lock "refresh_cache-25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1303.687508] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 
tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Acquiring lock "refresh_cache-be28d7a8-6566-45aa-8b4c-08c7eb29864d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.687710] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Acquired lock "refresh_cache-be28d7a8-6566-45aa-8b4c-08c7eb29864d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1303.687873] env[69992]: DEBUG nova.network.neutron [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1303.696695] env[69992]: DEBUG nova.network.neutron [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Port 15455be6-d2df-46a9-bd15-7872eadb1ab6 binding to destination host cpu-1 is already ACTIVE {{(pid=69992) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1303.814298] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5283205d-9bd5-ef03-0d85-ad5017da9bb1, 'name': SearchDatastore_Task, 'duration_secs': 0.008131} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.815072] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4903aa0e-8770-445c-95a2-1f3afcd5eab7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.820191] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1303.820191] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d16bf2-8331-e015-9acc-0e37db8c30a7" [ 1303.820191] env[69992]: _type = "Task" [ 1303.820191] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.827905] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d16bf2-8331-e015-9acc-0e37db8c30a7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.942555] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.457s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1303.943112] env[69992]: DEBUG nova.compute.manager [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1303.946019] env[69992]: DEBUG oslo_concurrency.lockutils [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.838s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1303.946019] env[69992]: DEBUG oslo_concurrency.lockutils [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1303.948164] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.391s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1303.949628] env[69992]: INFO nova.compute.claims [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1303.972232] env[69992]: INFO nova.scheduler.client.report [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Deleted allocations for instance 9591b360-414b-4aa9-94b2-5b9ccb9e7d39 [ 1304.044482] env[69992]: DEBUG nova.compute.manager [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1304.225625] env[69992]: DEBUG nova.network.neutron [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1304.291935] env[69992]: DEBUG nova.compute.manager [req-c25fca6e-6d04-4096-a017-4ac59a0e1414 req-14f3aeac-5dd8-4447-bbb3-6a1fb4836d15 service nova] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Received event network-vif-plugged-85e3736f-8bdc-4d92-b8c4-fef12e1a3bf4 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1304.292189] env[69992]: DEBUG oslo_concurrency.lockutils [req-c25fca6e-6d04-4096-a017-4ac59a0e1414 req-14f3aeac-5dd8-4447-bbb3-6a1fb4836d15 service nova] Acquiring lock "be28d7a8-6566-45aa-8b4c-08c7eb29864d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1304.292387] env[69992]: DEBUG oslo_concurrency.lockutils [req-c25fca6e-6d04-4096-a017-4ac59a0e1414 req-14f3aeac-5dd8-4447-bbb3-6a1fb4836d15 service nova] Lock "be28d7a8-6566-45aa-8b4c-08c7eb29864d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1304.292560] env[69992]: DEBUG oslo_concurrency.lockutils [req-c25fca6e-6d04-4096-a017-4ac59a0e1414 req-14f3aeac-5dd8-4447-bbb3-6a1fb4836d15 service nova] Lock "be28d7a8-6566-45aa-8b4c-08c7eb29864d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1304.292731] env[69992]: DEBUG nova.compute.manager [req-c25fca6e-6d04-4096-a017-4ac59a0e1414 req-14f3aeac-5dd8-4447-bbb3-6a1fb4836d15 service nova] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] No waiting events found dispatching network-vif-plugged-85e3736f-8bdc-4d92-b8c4-fef12e1a3bf4 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1304.292897] env[69992]: WARNING nova.compute.manager [req-c25fca6e-6d04-4096-a017-4ac59a0e1414 req-14f3aeac-5dd8-4447-bbb3-6a1fb4836d15 service nova] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Received unexpected event network-vif-plugged-85e3736f-8bdc-4d92-b8c4-fef12e1a3bf4 for instance with vm_state building and task_state spawning. [ 1304.293097] env[69992]: DEBUG nova.compute.manager [req-c25fca6e-6d04-4096-a017-4ac59a0e1414 req-14f3aeac-5dd8-4447-bbb3-6a1fb4836d15 service nova] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Received event network-changed-85e3736f-8bdc-4d92-b8c4-fef12e1a3bf4 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1304.293271] env[69992]: DEBUG nova.compute.manager [req-c25fca6e-6d04-4096-a017-4ac59a0e1414 req-14f3aeac-5dd8-4447-bbb3-6a1fb4836d15 service nova] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Refreshing instance network info cache due to event network-changed-85e3736f-8bdc-4d92-b8c4-fef12e1a3bf4. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1304.293438] env[69992]: DEBUG oslo_concurrency.lockutils [req-c25fca6e-6d04-4096-a017-4ac59a0e1414 req-14f3aeac-5dd8-4447-bbb3-6a1fb4836d15 service nova] Acquiring lock "refresh_cache-be28d7a8-6566-45aa-8b4c-08c7eb29864d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.330922] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d16bf2-8331-e015-9acc-0e37db8c30a7, 'name': SearchDatastore_Task, 'duration_secs': 0.00951} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.333555] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1304.333833] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7/25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1304.334120] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-29745df0-3626-4644-839b-70e6629732b0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.341380] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1304.341380] env[69992]: value = "task-2897772" [ 1304.341380] env[69992]: _type = "Task" [ 1304.341380] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.349809] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897772, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.367827] env[69992]: DEBUG nova.network.neutron [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Updating instance_info_cache with network_info: [{"id": "85e3736f-8bdc-4d92-b8c4-fef12e1a3bf4", "address": "fa:16:3e:c6:f3:7f", "network": {"id": "c1a8e187-2985-462e-8483-0a520daefefc", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1964185891-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cbfc3fa19c94905a9f80c997f594ad2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85e3736f-8b", "ovs_interfaceid": "85e3736f-8bdc-4d92-b8c4-fef12e1a3bf4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.454084] env[69992]: DEBUG nova.compute.utils [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1304.457356] env[69992]: DEBUG nova.compute.manager [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1304.457589] env[69992]: DEBUG nova.network.neutron [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1304.480182] env[69992]: DEBUG oslo_concurrency.lockutils [None req-568a214d-beef-49af-98ff-6065a9c94735 tempest-ServersTestManualDisk-54595896 tempest-ServersTestManualDisk-54595896-project-member] Lock "9591b360-414b-4aa9-94b2-5b9ccb9e7d39" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.187s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1304.497804] env[69992]: DEBUG nova.policy [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8589a47b616643f5a513f62354529eda', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57eaf44c4ac5491380b329e1e86e9454', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1304.576659] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1304.727340] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "31109fbd-ebc0-422d-a705-7d0e59d4bbb4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1304.727340] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "31109fbd-ebc0-422d-a705-7d0e59d4bbb4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1304.727547] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "31109fbd-ebc0-422d-a705-7d0e59d4bbb4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1304.809658] env[69992]: DEBUG nova.network.neutron [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 
tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Successfully created port: 9fcd8287-e476-4c07-87e5-ec3dcbad7449 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1304.856610] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897772, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.477396} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.856878] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7/25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1304.857082] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1304.857338] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1a8931cf-a848-4e00-ac2d-6764ccf3c827 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.863720] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1304.863720] env[69992]: value = "task-2897773" [ 1304.863720] env[69992]: _type = "Task" [ 1304.863720] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.871280] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Releasing lock "refresh_cache-be28d7a8-6566-45aa-8b4c-08c7eb29864d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1304.871585] env[69992]: DEBUG nova.compute.manager [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Instance network_info: |[{"id": "85e3736f-8bdc-4d92-b8c4-fef12e1a3bf4", "address": "fa:16:3e:c6:f3:7f", "network": {"id": "c1a8e187-2985-462e-8483-0a520daefefc", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1964185891-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cbfc3fa19c94905a9f80c997f594ad2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85e3736f-8b", "ovs_interfaceid": "85e3736f-8bdc-4d92-b8c4-fef12e1a3bf4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1304.871904] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897773, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.872167] env[69992]: DEBUG oslo_concurrency.lockutils [req-c25fca6e-6d04-4096-a017-4ac59a0e1414 req-14f3aeac-5dd8-4447-bbb3-6a1fb4836d15 service nova] Acquired lock "refresh_cache-be28d7a8-6566-45aa-8b4c-08c7eb29864d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1304.872344] env[69992]: DEBUG nova.network.neutron [req-c25fca6e-6d04-4096-a017-4ac59a0e1414 req-14f3aeac-5dd8-4447-bbb3-6a1fb4836d15 service nova] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Refreshing network info cache for port 85e3736f-8bdc-4d92-b8c4-fef12e1a3bf4 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1304.873635] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:f3:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b5c34919-7d52-4a52-bab1-81af4c8182ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '85e3736f-8bdc-4d92-b8c4-fef12e1a3bf4', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1304.885022] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Creating folder: Project (8cbfc3fa19c94905a9f80c997f594ad2). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1304.885022] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-391cf32a-79a8-48c0-abf6-126d65ba5a83 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.892771] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Created folder: Project (8cbfc3fa19c94905a9f80c997f594ad2) in parent group-v581821. [ 1304.893015] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Creating folder: Instances. Parent ref: group-v582076. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1304.893266] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e6e858a1-c0e7-45ee-ad44-dc8aa0efefcf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.902863] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Created folder: Instances in parent group-v582076. 
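The records above interleave several concurrent flows, distinguishable by their oslo request IDs (req-...), and repeatedly poll vSphere tasks (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, CreateVM_Task) until they report "completed successfully" together with a duration_secs value. A minimal, hypothetical Python sketch for pulling those completed-task durations out of a nova-compute log with this line format follows; the regular expressions are assumptions inferred from the lines shown here, not part of Nova or oslo.vmware, and the sketch assumes one log record per line (i.e. an unwrapped log file).

import re
import sys
from collections import defaultdict

# Assumed record shape (inferred from the log excerpt, not a Nova/oslo.vmware API):
#   "... [None req-<uuid> ...] Task: {'id': ..., 'name': CopyVirtualDisk_Task,
#    'duration_secs': 0.477396} completed successfully. ..."
REQ_ID = re.compile(r"req-[0-9a-f]{8}(?:-[0-9a-f]{4}){3}-[0-9a-f]{12}")
TASK_DONE = re.compile(
    r"'name': '?(?P<name>[A-Za-z_]+)'?,\s*'duration_secs':\s*(?P<secs>[0-9.]+)\}"
    r"\s*completed successfully"
)

def summarize(lines):
    """Map request id -> task name -> list of completion durations (seconds)."""
    per_request = defaultdict(lambda: defaultdict(list))
    for line in lines:
        done = TASK_DONE.search(line)
        if not done:
            continue
        req = REQ_ID.search(line)
        key = req.group(0) if req else "(no request id)"
        per_request[key][done.group("name")].append(float(done.group("secs")))
    return per_request

if __name__ == "__main__":
    # Hypothetical usage: python task_durations.py nova-compute.log
    with open(sys.argv[1]) as fh:
        for req, tasks in sorted(summarize(fh).items()):
            for name, secs in sorted(tasks.items()):
                print(f"{req}  {name}: n={len(secs)}  max={max(secs):.3f}s")

Run against a capture of this log (the file name above is only illustrative), the per-request summary makes it easier to tell whether a given build request, such as req-e3fadd6e-... or req-0438a6f7-... above, is spending its time in datastore searches, disk copies, or VM reconfiguration.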
[ 1304.903147] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1304.903342] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1304.903561] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2fd2cfc-67d9-4696-9030-5521802fe866 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.923011] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1304.923011] env[69992]: value = "task-2897776" [ 1304.923011] env[69992]: _type = "Task" [ 1304.923011] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.930890] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897776, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.958259] env[69992]: DEBUG nova.compute.manager [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1305.261774] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca42d293-c05f-4ae8-9f04-65b7a424de7d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.271236] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a33090-d8f5-4218-bc92-9eaf350187ea {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.301925] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eef13193-f7bb-46a4-8b18-fe5c567e79b1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.310161] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56eb9795-51f7-4a85-aace-f60bbd5a2db4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.324889] env[69992]: DEBUG nova.compute.provider_tree [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1305.372892] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897773, 'name': 
ExtendVirtualDisk_Task, 'duration_secs': 0.068995} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.373167] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1305.373923] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c0348f-f334-432a-a1d2-69bd86aab675 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.397299] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7/25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1305.397564] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f04dd05-2973-469d-8df9-4a5079acc677 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.416819] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1305.416819] env[69992]: value = "task-2897777" [ 1305.416819] env[69992]: _type = "Task" [ 1305.416819] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.424439] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897777, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.431442] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897776, 'name': CreateVM_Task, 'duration_secs': 0.379899} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.431717] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1305.432379] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.432542] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1305.432853] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1305.433107] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-829eb35a-4bc4-4f7f-a4bd-29a9539e8773 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.440059] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Waiting for the task: (returnval){ [ 1305.440059] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]524903ff-5dd0-c75d-7b27-094a10adc920" [ 1305.440059] env[69992]: _type = "Task" [ 1305.440059] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.448013] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524903ff-5dd0-c75d-7b27-094a10adc920, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.610976] env[69992]: DEBUG nova.network.neutron [req-c25fca6e-6d04-4096-a017-4ac59a0e1414 req-14f3aeac-5dd8-4447-bbb3-6a1fb4836d15 service nova] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Updated VIF entry in instance network info cache for port 85e3736f-8bdc-4d92-b8c4-fef12e1a3bf4. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1305.611377] env[69992]: DEBUG nova.network.neutron [req-c25fca6e-6d04-4096-a017-4ac59a0e1414 req-14f3aeac-5dd8-4447-bbb3-6a1fb4836d15 service nova] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Updating instance_info_cache with network_info: [{"id": "85e3736f-8bdc-4d92-b8c4-fef12e1a3bf4", "address": "fa:16:3e:c6:f3:7f", "network": {"id": "c1a8e187-2985-462e-8483-0a520daefefc", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1964185891-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cbfc3fa19c94905a9f80c997f594ad2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85e3736f-8b", "ovs_interfaceid": "85e3736f-8bdc-4d92-b8c4-fef12e1a3bf4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1305.760794] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "refresh_cache-31109fbd-ebc0-422d-a705-7d0e59d4bbb4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.760794] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "refresh_cache-31109fbd-ebc0-422d-a705-7d0e59d4bbb4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1305.760907] env[69992]: DEBUG nova.network.neutron [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1305.827798] env[69992]: DEBUG nova.scheduler.client.report [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1305.930375] env[69992]: 
DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897777, 'name': ReconfigVM_Task, 'duration_secs': 0.279803} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.930659] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Reconfigured VM instance instance-0000005a to attach disk [datastore2] 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7/25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1305.931360] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7609176b-ddf4-4b07-84e1-ee9b6c3b7eed {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.937851] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1305.937851] env[69992]: value = "task-2897778" [ 1305.937851] env[69992]: _type = "Task" [ 1305.937851] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.948384] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897778, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.951817] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524903ff-5dd0-c75d-7b27-094a10adc920, 'name': SearchDatastore_Task, 'duration_secs': 0.009988} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.952110] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1305.952347] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1305.952578] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.952727] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1305.952905] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1305.953183] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e1d96f9-f30d-40aa-bdcf-47325a35ed7d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.961102] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1305.961298] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1305.962044] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f0bd394-df29-4eec-be5f-527e06153ab1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.970039] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Waiting for the task: (returnval){ [ 1305.970039] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52875516-539b-104b-1a3e-0d2b4c4ab197" [ 1305.970039] env[69992]: _type = "Task" [ 1305.970039] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.971210] env[69992]: DEBUG nova.compute.manager [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1305.984245] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52875516-539b-104b-1a3e-0d2b4c4ab197, 'name': SearchDatastore_Task, 'duration_secs': 0.010585} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.985165] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce1bc496-47f4-46c4-98a0-e407a5ed398f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.991172] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Waiting for the task: (returnval){ [ 1305.991172] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5219eacb-8237-c477-c37b-cc01474b84f6" [ 1305.991172] env[69992]: _type = "Task" [ 1305.991172] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.004014] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5219eacb-8237-c477-c37b-cc01474b84f6, 'name': SearchDatastore_Task, 'duration_secs': 0.009607} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.004416] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1306.004553] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] be28d7a8-6566-45aa-8b4c-08c7eb29864d/be28d7a8-6566-45aa-8b4c-08c7eb29864d.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1306.004758] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f66e7578-909b-4da4-ba84-ff7cc48cbcc3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.009670] env[69992]: DEBUG nova.virt.hardware [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1306.009897] env[69992]: DEBUG nova.virt.hardware [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1306.010073] env[69992]: DEBUG nova.virt.hardware [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1306.010264] env[69992]: DEBUG nova.virt.hardware [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1306.010431] env[69992]: DEBUG nova.virt.hardware [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Image pref 0:0:0 
{{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1306.010557] env[69992]: DEBUG nova.virt.hardware [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1306.010793] env[69992]: DEBUG nova.virt.hardware [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1306.010964] env[69992]: DEBUG nova.virt.hardware [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1306.011154] env[69992]: DEBUG nova.virt.hardware [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1306.011319] env[69992]: DEBUG nova.virt.hardware [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1306.011492] env[69992]: DEBUG nova.virt.hardware [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1306.012310] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b79e4f-1309-41a8-9ee1-1ade6f7da05e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.015710] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Waiting for the task: (returnval){ [ 1306.015710] env[69992]: value = "task-2897779" [ 1306.015710] env[69992]: _type = "Task" [ 1306.015710] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.022475] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acfef752-da52-4557-b99f-f026ddaa658d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.030166] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Task: {'id': task-2897779, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.114526] env[69992]: DEBUG oslo_concurrency.lockutils [req-c25fca6e-6d04-4096-a017-4ac59a0e1414 req-14f3aeac-5dd8-4447-bbb3-6a1fb4836d15 service nova] Releasing lock "refresh_cache-be28d7a8-6566-45aa-8b4c-08c7eb29864d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1306.330214] env[69992]: DEBUG nova.compute.manager [req-38ee6d76-9d5f-476b-af01-61b9039ac8f6 req-c894780e-f1fb-46a9-9fe4-55dca91e6d94 service nova] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Received event network-vif-plugged-9fcd8287-e476-4c07-87e5-ec3dcbad7449 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1306.330500] env[69992]: DEBUG oslo_concurrency.lockutils [req-38ee6d76-9d5f-476b-af01-61b9039ac8f6 req-c894780e-f1fb-46a9-9fe4-55dca91e6d94 service nova] Acquiring lock "d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1306.330790] env[69992]: DEBUG oslo_concurrency.lockutils [req-38ee6d76-9d5f-476b-af01-61b9039ac8f6 req-c894780e-f1fb-46a9-9fe4-55dca91e6d94 service nova] Lock "d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1306.330989] env[69992]: DEBUG oslo_concurrency.lockutils [req-38ee6d76-9d5f-476b-af01-61b9039ac8f6 req-c894780e-f1fb-46a9-9fe4-55dca91e6d94 service nova] Lock "d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1306.331240] env[69992]: DEBUG nova.compute.manager [req-38ee6d76-9d5f-476b-af01-61b9039ac8f6 req-c894780e-f1fb-46a9-9fe4-55dca91e6d94 service nova] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] No waiting events found dispatching network-vif-plugged-9fcd8287-e476-4c07-87e5-ec3dcbad7449 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1306.331383] env[69992]: WARNING nova.compute.manager [req-38ee6d76-9d5f-476b-af01-61b9039ac8f6 req-c894780e-f1fb-46a9-9fe4-55dca91e6d94 service nova] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Received unexpected event network-vif-plugged-9fcd8287-e476-4c07-87e5-ec3dcbad7449 for instance with vm_state building and task_state spawning. [ 1306.332754] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.384s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1306.333085] env[69992]: DEBUG nova.compute.manager [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1306.336268] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.251s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1306.336268] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1306.339788] env[69992]: DEBUG oslo_concurrency.lockutils [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.736s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1306.343179] env[69992]: INFO nova.compute.claims [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1306.387455] env[69992]: INFO nova.scheduler.client.report [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Deleted allocations for instance dedba037-48a7-4083-925d-5f34e2a27362 [ 1306.457255] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897778, 'name': Rename_Task, 'duration_secs': 0.178043} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.457454] env[69992]: DEBUG nova.network.neutron [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Successfully updated port: 9fcd8287-e476-4c07-87e5-ec3dcbad7449 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1306.462060] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1306.462565] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c6b1d830-58cd-4b93-8ead-7176d2ed3afd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.475038] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1306.475038] env[69992]: value = "task-2897780" [ 1306.475038] env[69992]: _type = "Task" [ 1306.475038] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.485438] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897780, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.530062] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Task: {'id': task-2897779, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501827} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.530340] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] be28d7a8-6566-45aa-8b4c-08c7eb29864d/be28d7a8-6566-45aa-8b4c-08c7eb29864d.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1306.530558] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1306.532567] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-794e201c-c859-4d26-ae2d-98fa80b417bb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.536820] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Waiting for the task: (returnval){ [ 1306.536820] env[69992]: value = "task-2897781" [ 1306.536820] env[69992]: _type = "Task" [ 1306.536820] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.546455] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Task: {'id': task-2897781, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.606964] env[69992]: DEBUG nova.network.neutron [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Updating instance_info_cache with network_info: [{"id": "15455be6-d2df-46a9-bd15-7872eadb1ab6", "address": "fa:16:3e:7b:a7:d0", "network": {"id": "0655b1a1-e1ee-4efd-889c-0f72915310f5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1445509008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51b8195c4e7418cbdaa66aa5e5aff5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15455be6-d2", "ovs_interfaceid": "15455be6-d2df-46a9-bd15-7872eadb1ab6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.852130] env[69992]: DEBUG nova.compute.utils [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1306.855675] env[69992]: DEBUG nova.compute.manager [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1306.855849] env[69992]: DEBUG nova.network.neutron [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1306.897993] env[69992]: DEBUG nova.policy [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '503bcef25d9c4b2c8d2bc52f821ff188', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66b12281925c40fe95b9b4abb0cb6457', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1306.904048] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e9b629d-ace8-4544-b79d-0aaa9ebc24aa tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "dedba037-48a7-4083-925d-5f34e2a27362" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.864s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1306.963258] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "refresh_cache-d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.963512] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired lock "refresh_cache-d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1306.963573] env[69992]: DEBUG nova.network.neutron [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1306.992244] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897780, 'name': PowerOnVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.046958] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Task: {'id': task-2897781, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066369} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.047251] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1307.048073] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e277900-80db-4e5d-aa91-11c66eaf8c79 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.071376] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] be28d7a8-6566-45aa-8b4c-08c7eb29864d/be28d7a8-6566-45aa-8b4c-08c7eb29864d.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1307.071662] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-571b5ff8-6b3f-4d18-9f36-e378bd656315 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.091396] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Waiting for the task: (returnval){ [ 1307.091396] env[69992]: value = "task-2897782" [ 1307.091396] env[69992]: _type = "Task" [ 1307.091396] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.099741] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Task: {'id': task-2897782, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.109957] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "refresh_cache-31109fbd-ebc0-422d-a705-7d0e59d4bbb4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1307.194431] env[69992]: DEBUG nova.network.neutron [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Successfully created port: b6a0e7a4-6bd1-44df-af1c-9f2fd60387ca {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1307.356321] env[69992]: DEBUG nova.compute.manager [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1307.490743] env[69992]: DEBUG oslo_vmware.api [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897780, 'name': PowerOnVM_Task, 'duration_secs': 0.6056} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.491032] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1307.491255] env[69992]: INFO nova.compute.manager [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Took 7.45 seconds to spawn the instance on the hypervisor. [ 1307.491422] env[69992]: DEBUG nova.compute.manager [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1307.492243] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9e5493-2263-476f-8416-5bf25a114bb3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.505383] env[69992]: DEBUG nova.network.neutron [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1307.604935] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Task: {'id': task-2897782, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.636714] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67664b7b-1e8c-468f-b101-484c49424c62 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.663621] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962d1f13-d043-4c6b-8a21-1e8b47fea156 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.670849] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Updating instance '31109fbd-ebc0-422d-a705-7d0e59d4bbb4' progress to 83 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1307.677353] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36d6292-8332-4e68-aea0-d9bf98a3308d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.681681] env[69992]: DEBUG nova.network.neutron [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Updating instance_info_cache with network_info: [{"id": "9fcd8287-e476-4c07-87e5-ec3dcbad7449", "address": "fa:16:3e:17:89:4d", "network": {"id": "8276eb4a-aa4d-463a-beae-6aab3fe1a5ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-494735806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57eaf44c4ac5491380b329e1e86e9454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fcd8287-e4", "ovs_interfaceid": "9fcd8287-e476-4c07-87e5-ec3dcbad7449", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1307.689200] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c96a093-dd0f-4450-8349-0a028a02e81d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.722552] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f564f231-411b-46ea-8584-2e4bd13b263b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.730935] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-afb5343d-f6af-46b4-ae4b-42fe87858201 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.745384] env[69992]: DEBUG nova.compute.provider_tree [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1307.827400] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5b91209a-7708-4d06-843f-d555e0d51735 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1307.827657] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5b91209a-7708-4d06-843f-d555e0d51735 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1308.017831] env[69992]: INFO nova.compute.manager [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Took 37.09 seconds to build instance. [ 1308.107174] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Task: {'id': task-2897782, 'name': ReconfigVM_Task, 'duration_secs': 0.913668} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.107674] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Reconfigured VM instance instance-0000005b to attach disk [datastore2] be28d7a8-6566-45aa-8b4c-08c7eb29864d/be28d7a8-6566-45aa-8b4c-08c7eb29864d.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1308.108456] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-acefc9a4-53b6-462f-b3fc-aec0de75b6ca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.116813] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Waiting for the task: (returnval){ [ 1308.116813] env[69992]: value = "task-2897783" [ 1308.116813] env[69992]: _type = "Task" [ 1308.116813] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.124917] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Task: {'id': task-2897783, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.177864] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1308.177864] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7305150d-6903-4328-9d36-8e75ce548dfc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.185226] env[69992]: DEBUG oslo_vmware.api [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1308.185226] env[69992]: value = "task-2897784" [ 1308.185226] env[69992]: _type = "Task" [ 1308.185226] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.188495] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Releasing lock "refresh_cache-d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1308.188824] env[69992]: DEBUG nova.compute.manager [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Instance network_info: |[{"id": "9fcd8287-e476-4c07-87e5-ec3dcbad7449", "address": "fa:16:3e:17:89:4d", "network": {"id": "8276eb4a-aa4d-463a-beae-6aab3fe1a5ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-494735806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57eaf44c4ac5491380b329e1e86e9454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fcd8287-e4", "ovs_interfaceid": "9fcd8287-e476-4c07-87e5-ec3dcbad7449", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1308.189279] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None 
req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:89:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73b1ea51-8078-4169-921e-d5a224120ab4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9fcd8287-e476-4c07-87e5-ec3dcbad7449', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1308.202075] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1308.205818] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1308.206176] env[69992]: DEBUG oslo_vmware.api [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897784, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.206409] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb2b35d5-cdcb-410e-a16c-dd2990daa34c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.227026] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1308.227026] env[69992]: value = "task-2897785" [ 1308.227026] env[69992]: _type = "Task" [ 1308.227026] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.235858] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897785, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.251466] env[69992]: DEBUG nova.scheduler.client.report [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1308.332428] env[69992]: DEBUG nova.compute.utils [None req-5b91209a-7708-4d06-843f-d555e0d51735 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1308.371482] env[69992]: DEBUG nova.compute.manager [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1308.395748] env[69992]: DEBUG nova.virt.hardware [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1308.396116] env[69992]: DEBUG nova.virt.hardware [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1308.396287] env[69992]: DEBUG nova.virt.hardware [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1308.396478] env[69992]: DEBUG nova.virt.hardware [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1308.396635] env[69992]: DEBUG nova.virt.hardware [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1308.396789] env[69992]: DEBUG nova.virt.hardware [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1308.397014] env[69992]: DEBUG nova.virt.hardware [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1308.397184] env[69992]: DEBUG nova.virt.hardware [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1308.397356] env[69992]: DEBUG nova.virt.hardware [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1308.397705] env[69992]: DEBUG nova.virt.hardware [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1308.397705] env[69992]: DEBUG nova.virt.hardware [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1308.398561] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf02d1f-9994-482c-9906-e4449c27f648 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.406979] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad609d9c-fe0b-4aa0-9a50-aedc71991f08 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.520470] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e3fadd6e-b235-4bd1-ae4a-a18eb073cdbb tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.599s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
1308.627555] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Task: {'id': task-2897783, 'name': Rename_Task, 'duration_secs': 0.195655} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.627915] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1308.628190] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f93a8a0-7da8-4952-8c5d-709e39b4a6f0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.636031] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Waiting for the task: (returnval){ [ 1308.636031] env[69992]: value = "task-2897786" [ 1308.636031] env[69992]: _type = "Task" [ 1308.636031] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.643249] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Task: {'id': task-2897786, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.694499] env[69992]: DEBUG oslo_vmware.api [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897784, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.703567] env[69992]: DEBUG nova.network.neutron [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Successfully updated port: b6a0e7a4-6bd1-44df-af1c-9f2fd60387ca {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1308.724371] env[69992]: DEBUG nova.compute.manager [req-c34982b3-f8dc-4c3e-95fd-7a669f9830a9 req-6ee44e6e-d60f-4dff-9d71-3b9ffe97dffb service nova] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Received event network-changed-9fcd8287-e476-4c07-87e5-ec3dcbad7449 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1308.724415] env[69992]: DEBUG nova.compute.manager [req-c34982b3-f8dc-4c3e-95fd-7a669f9830a9 req-6ee44e6e-d60f-4dff-9d71-3b9ffe97dffb service nova] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Refreshing instance network info cache due to event network-changed-9fcd8287-e476-4c07-87e5-ec3dcbad7449. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1308.724606] env[69992]: DEBUG oslo_concurrency.lockutils [req-c34982b3-f8dc-4c3e-95fd-7a669f9830a9 req-6ee44e6e-d60f-4dff-9d71-3b9ffe97dffb service nova] Acquiring lock "refresh_cache-d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1308.724813] env[69992]: DEBUG oslo_concurrency.lockutils [req-c34982b3-f8dc-4c3e-95fd-7a669f9830a9 req-6ee44e6e-d60f-4dff-9d71-3b9ffe97dffb service nova] Acquired lock "refresh_cache-d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1308.724955] env[69992]: DEBUG nova.network.neutron [req-c34982b3-f8dc-4c3e-95fd-7a669f9830a9 req-6ee44e6e-d60f-4dff-9d71-3b9ffe97dffb service nova] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Refreshing network info cache for port 9fcd8287-e476-4c07-87e5-ec3dcbad7449 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1308.737591] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897785, 'name': CreateVM_Task, 'duration_secs': 0.33128} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.737745] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1308.738435] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1308.738586] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1308.738898] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1308.739214] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc8e3744-ffb7-480c-b65f-cf29ca8190ad {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.744199] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1308.744199] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52df790f-40c8-c370-a7c9-b83471b5ee57" [ 1308.744199] env[69992]: _type = "Task" [ 1308.744199] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.754358] env[69992]: DEBUG oslo_concurrency.lockutils [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.415s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1308.754843] env[69992]: DEBUG nova.compute.manager [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1308.757577] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52df790f-40c8-c370-a7c9-b83471b5ee57, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.758077] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.668s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1308.759502] env[69992]: INFO nova.compute.claims [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1308.834810] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5b91209a-7708-4d06-843f-d555e0d51735 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1309.127387] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "9464339a-b760-47e9-bc75-e88ce18bf71b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1309.127667] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "9464339a-b760-47e9-bc75-e88ce18bf71b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1309.145720] env[69992]: DEBUG oslo_vmware.api 
[None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Task: {'id': task-2897786, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.195052] env[69992]: DEBUG oslo_vmware.api [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897784, 'name': PowerOnVM_Task, 'duration_secs': 0.753516} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.195325] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1309.195511] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a09415-9cf4-4fe9-ba4d-38675ba8ee15 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Updating instance '31109fbd-ebc0-422d-a705-7d0e59d4bbb4' progress to 100 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1309.202857] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Acquiring lock "refresh_cache-1cf5a6d2-8ec9-429a-9c31-eb3c699389d9" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1309.202999] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Acquired lock "refresh_cache-1cf5a6d2-8ec9-429a-9c31-eb3c699389d9" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1309.203205] env[69992]: DEBUG nova.network.neutron [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1309.256036] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52df790f-40c8-c370-a7c9-b83471b5ee57, 'name': SearchDatastore_Task, 'duration_secs': 0.009481} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.256330] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1309.256694] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1309.257047] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1309.257292] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1309.257554] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1309.257838] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e60dfc5c-e5f1-46cf-ae2b-b789f56818c2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.260838] env[69992]: DEBUG nova.compute.utils [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1309.263041] env[69992]: DEBUG nova.compute.manager [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1309.263238] env[69992]: DEBUG nova.network.neutron [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1309.274413] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1309.274603] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1309.275359] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9174ff9-834b-4a00-ac36-c1418117456b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.281924] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1309.281924] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52e70af9-069b-b95b-97d3-87d6e53626ae" [ 1309.281924] env[69992]: _type = "Task" [ 1309.281924] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.290918] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e70af9-069b-b95b-97d3-87d6e53626ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.324199] env[69992]: DEBUG nova.policy [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd0f7a6e9a76342a1a4fd39a8b21a31d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dc6fa4e45f4c47c49d67e6efe2eb7a50', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1309.558278] env[69992]: DEBUG nova.network.neutron [req-c34982b3-f8dc-4c3e-95fd-7a669f9830a9 req-6ee44e6e-d60f-4dff-9d71-3b9ffe97dffb service nova] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Updated VIF entry in instance network info cache for port 9fcd8287-e476-4c07-87e5-ec3dcbad7449. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1309.558667] env[69992]: DEBUG nova.network.neutron [req-c34982b3-f8dc-4c3e-95fd-7a669f9830a9 req-6ee44e6e-d60f-4dff-9d71-3b9ffe97dffb service nova] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Updating instance_info_cache with network_info: [{"id": "9fcd8287-e476-4c07-87e5-ec3dcbad7449", "address": "fa:16:3e:17:89:4d", "network": {"id": "8276eb4a-aa4d-463a-beae-6aab3fe1a5ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-494735806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57eaf44c4ac5491380b329e1e86e9454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fcd8287-e4", "ovs_interfaceid": "9fcd8287-e476-4c07-87e5-ec3dcbad7449", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1309.629994] env[69992]: DEBUG nova.compute.manager [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1309.648457] env[69992]: DEBUG oslo_vmware.api [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Task: {'id': task-2897786, 'name': PowerOnVM_Task, 'duration_secs': 0.756433} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.648749] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1309.648964] env[69992]: INFO nova.compute.manager [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Took 7.16 seconds to spawn the instance on the hypervisor. 
[ 1309.649215] env[69992]: DEBUG nova.compute.manager [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1309.649943] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c408cda-261b-46cf-a14c-64a6fda9aee3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.745601] env[69992]: DEBUG nova.network.neutron [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1309.752902] env[69992]: DEBUG nova.network.neutron [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Successfully created port: 1b550e88-755a-45a1-98fd-6fcb8fa4a7a8 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1309.765767] env[69992]: DEBUG nova.compute.manager [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1309.793258] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e70af9-069b-b95b-97d3-87d6e53626ae, 'name': SearchDatastore_Task, 'duration_secs': 0.009506} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.796701] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85d8177c-dc33-46bd-98bb-36a413335f8e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.809228] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1309.809228] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52adbde4-de07-f14b-b757-71035eb57c18" [ 1309.809228] env[69992]: _type = "Task" [ 1309.809228] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.824164] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52adbde4-de07-f14b-b757-71035eb57c18, 'name': SearchDatastore_Task, 'duration_secs': 0.009701} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.824424] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1309.824884] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4/d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1309.825184] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3ed54e8-c8ac-459a-afc6-273f9c6ca3b6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.836168] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1309.836168] env[69992]: value = "task-2897787" [ 1309.836168] env[69992]: _type = "Task" [ 1309.836168] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.840859] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897787, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.899833] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5b91209a-7708-4d06-843f-d555e0d51735 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1309.900100] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5b91209a-7708-4d06-843f-d555e0d51735 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1309.900332] env[69992]: INFO nova.compute.manager [None req-5b91209a-7708-4d06-843f-d555e0d51735 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Attaching volume 5fab4ca8-04ab-4575-95a1-e55e5a73415d to /dev/sdb [ 1309.951802] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1abbf6a5-333b-4e93-ba1f-b71ab4050b99 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.961855] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba1e8e2-1d65-4788-b893-5bc52486106e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.965586] env[69992]: DEBUG nova.network.neutron [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Updating instance_info_cache with network_info: [{"id": "b6a0e7a4-6bd1-44df-af1c-9f2fd60387ca", "address": "fa:16:3e:1e:70:b0", "network": {"id": "7939598a-4728-456a-9950-619744f8668b", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1074907595-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66b12281925c40fe95b9b4abb0cb6457", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1520c99-af74-4d61-a8ae-56aef56ef4f0", "external-id": "nsx-vlan-transportzone-891", "segmentation_id": 891, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6a0e7a4-6b", "ovs_interfaceid": "b6a0e7a4-6bd1-44df-af1c-9f2fd60387ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1309.985599] env[69992]: DEBUG nova.virt.block_device [None req-5b91209a-7708-4d06-843f-d555e0d51735 
tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Updating existing volume attachment record: b224c91a-bbc8-4f25-9d8f-92a5cc2265ad {{(pid=69992) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1310.063020] env[69992]: DEBUG oslo_concurrency.lockutils [req-c34982b3-f8dc-4c3e-95fd-7a669f9830a9 req-6ee44e6e-d60f-4dff-9d71-3b9ffe97dffb service nova] Releasing lock "refresh_cache-d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1310.148749] env[69992]: DEBUG oslo_concurrency.lockutils [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "2b89e218-81cc-49fc-a80a-35dde48bdd5d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1310.148986] env[69992]: DEBUG oslo_concurrency.lockutils [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "2b89e218-81cc-49fc-a80a-35dde48bdd5d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1310.168527] env[69992]: INFO nova.compute.manager [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Took 33.37 seconds to build instance. 
[ 1310.173190] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1310.174446] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144c1171-c8c6-475d-90a0-ba8e2cf6c6b0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.185376] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f286afd5-c9f9-4857-8843-dedde157dd7a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.227411] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb075742-a9c7-4c0d-ac7c-7efb4586a72a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.237440] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb8c5e65-97b2-4c3f-870f-66d9838e6f2e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.256180] env[69992]: DEBUG nova.compute.provider_tree [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1310.344938] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897787, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511867} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.345566] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4/d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1310.345836] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1310.346058] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4d2a1381-02a2-45c8-9c7c-4cd28670cd29 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.352726] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1310.352726] env[69992]: value = "task-2897789" [ 1310.352726] env[69992]: _type = "Task" [ 1310.352726] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.362831] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897789, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.468529] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Releasing lock "refresh_cache-1cf5a6d2-8ec9-429a-9c31-eb3c699389d9" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1310.468989] env[69992]: DEBUG nova.compute.manager [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Instance network_info: |[{"id": "b6a0e7a4-6bd1-44df-af1c-9f2fd60387ca", "address": "fa:16:3e:1e:70:b0", "network": {"id": "7939598a-4728-456a-9950-619744f8668b", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1074907595-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66b12281925c40fe95b9b4abb0cb6457", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1520c99-af74-4d61-a8ae-56aef56ef4f0", "external-id": "nsx-vlan-transportzone-891", "segmentation_id": 891, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6a0e7a4-6b", "ovs_interfaceid": "b6a0e7a4-6bd1-44df-af1c-9f2fd60387ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1310.469566] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:70:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1520c99-af74-4d61-a8ae-56aef56ef4f0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b6a0e7a4-6bd1-44df-af1c-9f2fd60387ca', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1310.481794] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Creating folder: Project (66b12281925c40fe95b9b4abb0cb6457). Parent ref: group-v581821. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1310.482203] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-54674424-6213-45fc-9d38-bb7ac33b1b5e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.492363] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Created folder: Project (66b12281925c40fe95b9b4abb0cb6457) in parent group-v581821. [ 1310.492647] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Creating folder: Instances. Parent ref: group-v582081. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1310.492953] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e78d0c99-0fd1-4152-9fd4-dcd8b3022dff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.504685] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Created folder: Instances in parent group-v582081. [ 1310.504685] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1310.504685] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1310.504685] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d39b4cf2-f303-4be1-a836-ca07a082f95e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.530665] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1310.530665] env[69992]: value = "task-2897794" [ 1310.530665] env[69992]: _type = "Task" [ 1310.530665] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.540559] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897794, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.652792] env[69992]: DEBUG nova.compute.manager [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1310.671439] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0438a6f7-3828-4793-ab9d-33a19d0e4200 tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Lock "be28d7a8-6566-45aa-8b4c-08c7eb29864d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.921s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1310.759790] env[69992]: DEBUG nova.scheduler.client.report [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1310.779259] env[69992]: DEBUG nova.compute.manager [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1310.808795] env[69992]: DEBUG nova.virt.hardware [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1310.809161] env[69992]: DEBUG nova.virt.hardware [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1310.809398] env[69992]: DEBUG nova.virt.hardware [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1310.809710] env[69992]: DEBUG nova.virt.hardware [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 
tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1310.810567] env[69992]: DEBUG nova.virt.hardware [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1310.810567] env[69992]: DEBUG nova.virt.hardware [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1310.810567] env[69992]: DEBUG nova.virt.hardware [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1310.810708] env[69992]: DEBUG nova.virt.hardware [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1310.811521] env[69992]: DEBUG nova.virt.hardware [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1310.811521] env[69992]: DEBUG nova.virt.hardware [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1310.811521] env[69992]: DEBUG nova.virt.hardware [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1310.812727] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91525498-55ad-462e-97c5-14908bc51f0b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.821143] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-262a3b04-e84d-414b-b5d7-266d16daab6e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.840014] env[69992]: DEBUG nova.compute.manager [req-a42fa3d8-162b-4763-9e66-69e6b63a3702 req-4810d172-99b3-40e2-96f3-766de95979e3 service nova] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Received event network-vif-plugged-b6a0e7a4-6bd1-44df-af1c-9f2fd60387ca {{(pid=69992) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11737}} [ 1310.840325] env[69992]: DEBUG oslo_concurrency.lockutils [req-a42fa3d8-162b-4763-9e66-69e6b63a3702 req-4810d172-99b3-40e2-96f3-766de95979e3 service nova] Acquiring lock "1cf5a6d2-8ec9-429a-9c31-eb3c699389d9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1310.840611] env[69992]: DEBUG oslo_concurrency.lockutils [req-a42fa3d8-162b-4763-9e66-69e6b63a3702 req-4810d172-99b3-40e2-96f3-766de95979e3 service nova] Lock "1cf5a6d2-8ec9-429a-9c31-eb3c699389d9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1310.840882] env[69992]: DEBUG oslo_concurrency.lockutils [req-a42fa3d8-162b-4763-9e66-69e6b63a3702 req-4810d172-99b3-40e2-96f3-766de95979e3 service nova] Lock "1cf5a6d2-8ec9-429a-9c31-eb3c699389d9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1310.841192] env[69992]: DEBUG nova.compute.manager [req-a42fa3d8-162b-4763-9e66-69e6b63a3702 req-4810d172-99b3-40e2-96f3-766de95979e3 service nova] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] No waiting events found dispatching network-vif-plugged-b6a0e7a4-6bd1-44df-af1c-9f2fd60387ca {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1310.841498] env[69992]: WARNING nova.compute.manager [req-a42fa3d8-162b-4763-9e66-69e6b63a3702 req-4810d172-99b3-40e2-96f3-766de95979e3 service nova] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Received unexpected event network-vif-plugged-b6a0e7a4-6bd1-44df-af1c-9f2fd60387ca for instance with vm_state building and task_state spawning. [ 1310.841794] env[69992]: DEBUG nova.compute.manager [req-a42fa3d8-162b-4763-9e66-69e6b63a3702 req-4810d172-99b3-40e2-96f3-766de95979e3 service nova] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Received event network-changed-b6a0e7a4-6bd1-44df-af1c-9f2fd60387ca {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1310.842094] env[69992]: DEBUG nova.compute.manager [req-a42fa3d8-162b-4763-9e66-69e6b63a3702 req-4810d172-99b3-40e2-96f3-766de95979e3 service nova] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Refreshing instance network info cache due to event network-changed-b6a0e7a4-6bd1-44df-af1c-9f2fd60387ca. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1310.842503] env[69992]: DEBUG oslo_concurrency.lockutils [req-a42fa3d8-162b-4763-9e66-69e6b63a3702 req-4810d172-99b3-40e2-96f3-766de95979e3 service nova] Acquiring lock "refresh_cache-1cf5a6d2-8ec9-429a-9c31-eb3c699389d9" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.842800] env[69992]: DEBUG oslo_concurrency.lockutils [req-a42fa3d8-162b-4763-9e66-69e6b63a3702 req-4810d172-99b3-40e2-96f3-766de95979e3 service nova] Acquired lock "refresh_cache-1cf5a6d2-8ec9-429a-9c31-eb3c699389d9" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1310.843059] env[69992]: DEBUG nova.network.neutron [req-a42fa3d8-162b-4763-9e66-69e6b63a3702 req-4810d172-99b3-40e2-96f3-766de95979e3 service nova] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Refreshing network info cache for port b6a0e7a4-6bd1-44df-af1c-9f2fd60387ca {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1310.863203] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897789, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114635} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.863499] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1310.864333] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4561a5ec-d57a-427f-8236-27a19eaddd21 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.888597] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4/d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1310.889412] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-62f6239c-a1c6-407a-89ed-fd9eee48d233 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.913017] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1310.913017] env[69992]: value = "task-2897795" [ 1310.913017] env[69992]: _type = "Task" [ 1310.913017] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.918906] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897795, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.042148] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897794, 'name': CreateVM_Task, 'duration_secs': 0.369477} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.042329] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1311.043137] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.043310] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1311.043627] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1311.043882] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1b006ec-fd4a-4b1a-80af-f01673f9497f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.048417] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Waiting for the task: (returnval){ [ 1311.048417] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52c0c35f-34c9-1bd0-20b8-3157c9216c96" [ 1311.048417] env[69992]: _type = "Task" [ 1311.048417] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.056939] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c0c35f-34c9-1bd0-20b8-3157c9216c96, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.176508] env[69992]: DEBUG oslo_concurrency.lockutils [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1311.265486] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.507s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1311.266060] env[69992]: DEBUG nova.compute.manager [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1311.268833] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.854s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1311.269628] env[69992]: DEBUG nova.objects.instance [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lazy-loading 'resources' on Instance uuid c6e4f19b-7264-4eea-a472-f64a68d4df22 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1311.349678] env[69992]: DEBUG nova.network.neutron [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Successfully updated port: 1b550e88-755a-45a1-98fd-6fcb8fa4a7a8 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1311.420600] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897795, 'name': ReconfigVM_Task, 'duration_secs': 0.352205} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.420947] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Reconfigured VM instance instance-0000005c to attach disk [datastore1] d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4/d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1311.421571] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-83cf92b1-35df-46cc-adf1-0b1606610516 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.428500] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1311.428500] env[69992]: value = "task-2897796" [ 1311.428500] env[69992]: _type = "Task" [ 1311.428500] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.436442] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897796, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.525499] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Acquiring lock "be28d7a8-6566-45aa-8b4c-08c7eb29864d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1311.525786] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Lock "be28d7a8-6566-45aa-8b4c-08c7eb29864d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1311.526009] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Acquiring lock "be28d7a8-6566-45aa-8b4c-08c7eb29864d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1311.526202] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Lock "be28d7a8-6566-45aa-8b4c-08c7eb29864d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1311.526371] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Lock "be28d7a8-6566-45aa-8b4c-08c7eb29864d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1311.528551] env[69992]: INFO nova.compute.manager [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Terminating instance [ 1311.537976] env[69992]: DEBUG nova.network.neutron [req-a42fa3d8-162b-4763-9e66-69e6b63a3702 req-4810d172-99b3-40e2-96f3-766de95979e3 service nova] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Updated VIF entry in instance network info cache for port b6a0e7a4-6bd1-44df-af1c-9f2fd60387ca. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1311.538372] env[69992]: DEBUG nova.network.neutron [req-a42fa3d8-162b-4763-9e66-69e6b63a3702 req-4810d172-99b3-40e2-96f3-766de95979e3 service nova] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Updating instance_info_cache with network_info: [{"id": "b6a0e7a4-6bd1-44df-af1c-9f2fd60387ca", "address": "fa:16:3e:1e:70:b0", "network": {"id": "7939598a-4728-456a-9950-619744f8668b", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1074907595-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66b12281925c40fe95b9b4abb0cb6457", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1520c99-af74-4d61-a8ae-56aef56ef4f0", "external-id": "nsx-vlan-transportzone-891", "segmentation_id": 891, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6a0e7a4-6b", "ovs_interfaceid": "b6a0e7a4-6bd1-44df-af1c-9f2fd60387ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1311.558359] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c0c35f-34c9-1bd0-20b8-3157c9216c96, 'name': SearchDatastore_Task, 'duration_secs': 0.011968} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.558640] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1311.558906] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1311.559152] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.559303] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1311.559479] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1311.559728] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e35e68e1-6ee1-43d3-a2fb-65be2a27584e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.567999] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1311.568138] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1311.568867] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22cdc9ee-3c7f-41e1-9db5-2bc179bff28c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.574952] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Waiting for the task: (returnval){ [ 1311.574952] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52cbdd33-807f-7b5b-5d8e-a067cfcde340" [ 1311.574952] env[69992]: _type = "Task" [ 1311.574952] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.582268] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52cbdd33-807f-7b5b-5d8e-a067cfcde340, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.772262] env[69992]: DEBUG nova.compute.utils [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1311.773650] env[69992]: DEBUG nova.compute.manager [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1311.773828] env[69992]: DEBUG nova.network.neutron [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1311.814449] env[69992]: DEBUG nova.policy [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '57d2ee1abedf4874bcb44b4076199da6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b8716c4b7324052a3472734c655655a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1311.852072] env[69992]: DEBUG oslo_concurrency.lockutils [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.852240] env[69992]: DEBUG oslo_concurrency.lockutils [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1311.852399] env[69992]: DEBUG nova.network.neutron [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1311.941143] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897796, 'name': Rename_Task, 'duration_secs': 0.147434} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.941143] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1311.941143] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-42b1d055-d65e-4ac4-b9af-cfe6a6b7eb4c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.947903] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1311.947903] env[69992]: value = "task-2897797" [ 1311.947903] env[69992]: _type = "Task" [ 1311.947903] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.958833] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897797, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.034098] env[69992]: DEBUG nova.compute.manager [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1312.034265] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1312.035334] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851955de-bd4b-4918-961b-5ad5d3600165 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.041308] env[69992]: DEBUG oslo_concurrency.lockutils [req-a42fa3d8-162b-4763-9e66-69e6b63a3702 req-4810d172-99b3-40e2-96f3-766de95979e3 service nova] Releasing lock "refresh_cache-1cf5a6d2-8ec9-429a-9c31-eb3c699389d9" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1312.045497] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1312.049599] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9238f30c-d6c8-42dd-8d98-c69f7577913e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.057367] env[69992]: DEBUG oslo_vmware.api [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Waiting for the task: (returnval){ [ 1312.057367] env[69992]: value = "task-2897798" [ 1312.057367] env[69992]: _type = "Task" [ 1312.057367] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.069020] env[69992]: DEBUG oslo_vmware.api [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Task: {'id': task-2897798, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.089423] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52cbdd33-807f-7b5b-5d8e-a067cfcde340, 'name': SearchDatastore_Task, 'duration_secs': 0.008493} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.090323] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbeb6dd3-d5d4-4167-8f65-de8a2bea8f7b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.094671] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d9f5aa2-e46c-4f23-a1a0-ab94b3562ef5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.098271] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Waiting for the task: (returnval){ [ 1312.098271] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5264a209-5993-5398-206d-4afb6dc529a2" [ 1312.098271] env[69992]: _type = "Task" [ 1312.098271] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.104530] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702aab71-dd7d-491e-bc40-71f55cb4ae58 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.112051] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5264a209-5993-5398-206d-4afb6dc529a2, 'name': SearchDatastore_Task, 'duration_secs': 0.009639} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.112317] env[69992]: DEBUG nova.network.neutron [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Successfully created port: 62dc8388-0e1c-4ec8-8f41-8e1feaa83858 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1312.114474] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1312.114730] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9/1cf5a6d2-8ec9-429a-9c31-eb3c699389d9.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1312.115163] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4240839f-bab4-4b74-a97c-b6bc7c5da86a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.144947] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a213809-2a86-47c3-9528-c41062f53104 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.149087] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Waiting for the task: (returnval){ [ 1312.149087] env[69992]: value = "task-2897799" [ 1312.149087] env[69992]: _type = "Task" [ 1312.149087] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.155091] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4ad49ef-0ac8-411e-9741-202adc823946 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.164195] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Task: {'id': task-2897799, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.172200] env[69992]: DEBUG nova.compute.provider_tree [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1312.228613] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d66420e8-f2c6-4688-809f-6e6c242b9756 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "31109fbd-ebc0-422d-a705-7d0e59d4bbb4" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1312.228975] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d66420e8-f2c6-4688-809f-6e6c242b9756 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "31109fbd-ebc0-422d-a705-7d0e59d4bbb4" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1312.229431] env[69992]: DEBUG nova.compute.manager [None req-d66420e8-f2c6-4688-809f-6e6c242b9756 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Going to confirm migration 5 {{(pid=69992) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1312.276706] env[69992]: DEBUG nova.compute.manager [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1312.399848] env[69992]: DEBUG nova.network.neutron [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1312.461958] env[69992]: DEBUG oslo_vmware.api [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897797, 'name': PowerOnVM_Task, 'duration_secs': 0.506228} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.462247] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1312.462461] env[69992]: INFO nova.compute.manager [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Took 6.49 seconds to spawn the instance on the hypervisor. 
[ 1312.464164] env[69992]: DEBUG nova.compute.manager [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1312.464164] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be50f267-d411-4bf1-b766-c847894f96eb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.572047] env[69992]: DEBUG oslo_vmware.api [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Task: {'id': task-2897798, 'name': PowerOffVM_Task, 'duration_secs': 0.264697} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.572356] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1312.572639] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1312.573043] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-11c6c316-3105-44db-b36d-0976398a5a61 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.589422] env[69992]: DEBUG nova.network.neutron [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Updating instance_info_cache with network_info: [{"id": "1b550e88-755a-45a1-98fd-6fcb8fa4a7a8", "address": "fa:16:3e:14:a9:b3", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b550e88-75", "ovs_interfaceid": "1b550e88-755a-45a1-98fd-6fcb8fa4a7a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] 
{{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1312.651287] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1312.651407] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1312.651684] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Deleting the datastore file [datastore2] be28d7a8-6566-45aa-8b4c-08c7eb29864d {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1312.652209] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-555c0c00-9806-479f-8546-63f9377f38d1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.663158] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Task: {'id': task-2897799, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50054} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.664366] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9/1cf5a6d2-8ec9-429a-9c31-eb3c699389d9.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1312.664591] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1312.664894] env[69992]: DEBUG oslo_vmware.api [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Waiting for the task: (returnval){ [ 1312.664894] env[69992]: value = "task-2897802" [ 1312.664894] env[69992]: _type = "Task" [ 1312.664894] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.665080] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-51578acf-5c39-41a7-949d-5a2f299352e3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.675372] env[69992]: DEBUG nova.scheduler.client.report [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1312.678672] env[69992]: DEBUG oslo_vmware.api [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Task: {'id': task-2897802, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.679988] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Waiting for the task: (returnval){ [ 1312.679988] env[69992]: value = "task-2897803" [ 1312.679988] env[69992]: _type = "Task" [ 1312.679988] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.687922] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Task: {'id': task-2897803, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.790978] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d66420e8-f2c6-4688-809f-6e6c242b9756 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "refresh_cache-31109fbd-ebc0-422d-a705-7d0e59d4bbb4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1312.791274] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d66420e8-f2c6-4688-809f-6e6c242b9756 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "refresh_cache-31109fbd-ebc0-422d-a705-7d0e59d4bbb4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1312.791482] env[69992]: DEBUG nova.network.neutron [None req-d66420e8-f2c6-4688-809f-6e6c242b9756 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1312.791707] env[69992]: DEBUG nova.objects.instance [None req-d66420e8-f2c6-4688-809f-6e6c242b9756 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lazy-loading 'info_cache' on Instance uuid 31109fbd-ebc0-422d-a705-7d0e59d4bbb4 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1312.856265] env[69992]: DEBUG nova.compute.manager [req-68c914ab-073e-4dbb-bf6f-fd11d763bf39 req-1227bc09-dfcd-40e5-a591-6b7893f802c5 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Received event network-vif-plugged-1b550e88-755a-45a1-98fd-6fcb8fa4a7a8 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1312.856383] env[69992]: DEBUG oslo_concurrency.lockutils [req-68c914ab-073e-4dbb-bf6f-fd11d763bf39 req-1227bc09-dfcd-40e5-a591-6b7893f802c5 service nova] Acquiring lock "6ccc70f5-4857-4af3-99a1-f60ec35aebaf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1312.856618] env[69992]: DEBUG oslo_concurrency.lockutils [req-68c914ab-073e-4dbb-bf6f-fd11d763bf39 req-1227bc09-dfcd-40e5-a591-6b7893f802c5 service nova] Lock "6ccc70f5-4857-4af3-99a1-f60ec35aebaf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1312.856748] env[69992]: DEBUG oslo_concurrency.lockutils [req-68c914ab-073e-4dbb-bf6f-fd11d763bf39 req-1227bc09-dfcd-40e5-a591-6b7893f802c5 service nova] Lock "6ccc70f5-4857-4af3-99a1-f60ec35aebaf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1312.856938] env[69992]: DEBUG nova.compute.manager [req-68c914ab-073e-4dbb-bf6f-fd11d763bf39 req-1227bc09-dfcd-40e5-a591-6b7893f802c5 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] No waiting events found dispatching network-vif-plugged-1b550e88-755a-45a1-98fd-6fcb8fa4a7a8 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 
1312.857132] env[69992]: WARNING nova.compute.manager [req-68c914ab-073e-4dbb-bf6f-fd11d763bf39 req-1227bc09-dfcd-40e5-a591-6b7893f802c5 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Received unexpected event network-vif-plugged-1b550e88-755a-45a1-98fd-6fcb8fa4a7a8 for instance with vm_state building and task_state spawning. [ 1312.857324] env[69992]: DEBUG nova.compute.manager [req-68c914ab-073e-4dbb-bf6f-fd11d763bf39 req-1227bc09-dfcd-40e5-a591-6b7893f802c5 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Received event network-changed-1b550e88-755a-45a1-98fd-6fcb8fa4a7a8 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1312.857494] env[69992]: DEBUG nova.compute.manager [req-68c914ab-073e-4dbb-bf6f-fd11d763bf39 req-1227bc09-dfcd-40e5-a591-6b7893f802c5 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Refreshing instance network info cache due to event network-changed-1b550e88-755a-45a1-98fd-6fcb8fa4a7a8. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1312.858382] env[69992]: DEBUG oslo_concurrency.lockutils [req-68c914ab-073e-4dbb-bf6f-fd11d763bf39 req-1227bc09-dfcd-40e5-a591-6b7893f802c5 service nova] Acquiring lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1312.985042] env[69992]: INFO nova.compute.manager [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Took 32.95 seconds to build instance. [ 1313.092745] env[69992]: DEBUG oslo_concurrency.lockutils [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1313.092867] env[69992]: DEBUG nova.compute.manager [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Instance network_info: |[{"id": "1b550e88-755a-45a1-98fd-6fcb8fa4a7a8", "address": "fa:16:3e:14:a9:b3", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b550e88-75", "ovs_interfaceid": "1b550e88-755a-45a1-98fd-6fcb8fa4a7a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1313.093289] env[69992]: DEBUG oslo_concurrency.lockutils [req-68c914ab-073e-4dbb-bf6f-fd11d763bf39 req-1227bc09-dfcd-40e5-a591-6b7893f802c5 service nova] Acquired lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1313.093545] env[69992]: DEBUG nova.network.neutron [req-68c914ab-073e-4dbb-bf6f-fd11d763bf39 req-1227bc09-dfcd-40e5-a591-6b7893f802c5 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Refreshing network info cache for port 1b550e88-755a-45a1-98fd-6fcb8fa4a7a8 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1313.095199] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:a9:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ead20342-9afa-435e-a22b-b4a903457712', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1b550e88-755a-45a1-98fd-6fcb8fa4a7a8', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1313.107455] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1313.108987] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1313.108987] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2337769f-55a8-46b6-939c-daa3e405faf9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.141044] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1313.141044] env[69992]: value = "task-2897804" [ 1313.141044] env[69992]: _type = "Task" [ 1313.141044] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.152011] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897804, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.176380] env[69992]: DEBUG oslo_vmware.api [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Task: {'id': task-2897802, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172215} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.176643] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1313.176824] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1313.177013] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1313.177196] env[69992]: INFO nova.compute.manager [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1313.177431] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1313.178029] env[69992]: DEBUG nova.compute.manager [-] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1313.178029] env[69992]: DEBUG nova.network.neutron [-] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1313.184378] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.916s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1313.186374] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.930s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1313.186597] env[69992]: DEBUG nova.objects.instance [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lazy-loading 'resources' on Instance uuid a8813822-f77b-4b73-a6dc-e0eab83b0402 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1313.192458] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Task: {'id': task-2897803, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063928} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.193993] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1313.193993] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6f6f92-c9a7-4649-b896-3cee186cb0d5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.221182] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9/1cf5a6d2-8ec9-429a-9c31-eb3c699389d9.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1313.221925] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9688ee0-14d8-4682-a55e-642d66ce5c90 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.247932] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Waiting for the task: (returnval){ [ 1313.247932] env[69992]: value = "task-2897805" [ 1313.247932] env[69992]: _type = "Task" [ 1313.247932] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.256810] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Task: {'id': task-2897805, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.257761] env[69992]: INFO nova.scheduler.client.report [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Deleted allocations for instance c6e4f19b-7264-4eea-a472-f64a68d4df22 [ 1313.287994] env[69992]: DEBUG nova.compute.manager [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1313.317504] env[69992]: DEBUG nova.virt.hardware [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1313.318407] env[69992]: DEBUG nova.virt.hardware [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1313.318615] env[69992]: DEBUG nova.virt.hardware [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1313.318843] env[69992]: DEBUG nova.virt.hardware [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1313.319077] env[69992]: DEBUG nova.virt.hardware [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1313.319810] env[69992]: DEBUG nova.virt.hardware [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1313.319810] env[69992]: DEBUG nova.virt.hardware [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1313.319966] env[69992]: DEBUG nova.virt.hardware [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1313.320132] env[69992]: DEBUG nova.virt.hardware [None 
req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1313.320334] env[69992]: DEBUG nova.virt.hardware [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1313.320551] env[69992]: DEBUG nova.virt.hardware [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1313.321779] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73e382a-2cc3-430d-9512-63418cdfa5cc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.331446] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23677b8e-f4b3-4676-ad01-f489897edd71 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.488173] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4cc865dc-27a6-4f04-8b88-cd8d5ac9a3c8 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.462s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1313.652738] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897804, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.760383] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Task: {'id': task-2897805, 'name': ReconfigVM_Task, 'duration_secs': 0.277874} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.760686] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9/1cf5a6d2-8ec9-429a-9c31-eb3c699389d9.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1313.761363] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-04533cc3-260d-493a-b4c5-03803beee855 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.768042] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7909375c-2f63-4830-8742-0ab1382257a7 tempest-ImagesTestJSON-1158997241 tempest-ImagesTestJSON-1158997241-project-member] Lock "c6e4f19b-7264-4eea-a472-f64a68d4df22" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.909s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1313.772259] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Waiting for the task: (returnval){ [ 1313.772259] env[69992]: value = "task-2897806" [ 1313.772259] env[69992]: _type = "Task" [ 1313.772259] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.783345] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Task: {'id': task-2897806, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.876369] env[69992]: DEBUG nova.network.neutron [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Successfully updated port: 62dc8388-0e1c-4ec8-8f41-8e1feaa83858 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1313.918273] env[69992]: DEBUG nova.network.neutron [req-68c914ab-073e-4dbb-bf6f-fd11d763bf39 req-1227bc09-dfcd-40e5-a591-6b7893f802c5 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Updated VIF entry in instance network info cache for port 1b550e88-755a-45a1-98fd-6fcb8fa4a7a8. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1313.918718] env[69992]: DEBUG nova.network.neutron [req-68c914ab-073e-4dbb-bf6f-fd11d763bf39 req-1227bc09-dfcd-40e5-a591-6b7893f802c5 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Updating instance_info_cache with network_info: [{"id": "1b550e88-755a-45a1-98fd-6fcb8fa4a7a8", "address": "fa:16:3e:14:a9:b3", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b550e88-75", "ovs_interfaceid": "1b550e88-755a-45a1-98fd-6fcb8fa4a7a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.009898] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e8b54f-43f9-4439-be8d-184d6d36b64a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.019341] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9300eee2-f498-4625-b90f-ca6c856fbdd0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.051850] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-349f32d3-7fd1-4fe8-a3b6-205a626aa118 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.057497] env[69992]: DEBUG nova.network.neutron [-] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.064661] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1f0369-9f95-4542-b077-34af821d86b0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.080365] env[69992]: DEBUG nova.compute.provider_tree [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1314.151823] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897804, 'name': CreateVM_Task, 'duration_secs': 0.598547} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.151961] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1314.152679] env[69992]: DEBUG oslo_concurrency.lockutils [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.152838] env[69992]: DEBUG oslo_concurrency.lockutils [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1314.153174] env[69992]: DEBUG oslo_concurrency.lockutils [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1314.153752] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f78aab75-e520-42c7-9e32-e79e1e224c24 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.158226] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1314.158226] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52810c16-b0da-1cf7-15f7-e2fbaeff10a3" [ 1314.158226] env[69992]: _type = "Task" [ 1314.158226] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.165831] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52810c16-b0da-1cf7-15f7-e2fbaeff10a3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.195686] env[69992]: DEBUG nova.network.neutron [None req-d66420e8-f2c6-4688-809f-6e6c242b9756 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Updating instance_info_cache with network_info: [{"id": "15455be6-d2df-46a9-bd15-7872eadb1ab6", "address": "fa:16:3e:7b:a7:d0", "network": {"id": "0655b1a1-e1ee-4efd-889c-0f72915310f5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1445509008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51b8195c4e7418cbdaa66aa5e5aff5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15455be6-d2", "ovs_interfaceid": "15455be6-d2df-46a9-bd15-7872eadb1ab6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.275209] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "cc8a809a-1a3b-4dad-a74b-d2f8d267b476" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1314.275470] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "cc8a809a-1a3b-4dad-a74b-d2f8d267b476" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1314.285818] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Task: {'id': task-2897806, 'name': Rename_Task, 'duration_secs': 0.142321} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.286594] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1314.286836] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b735538-c1ae-426c-b540-6272a5488925 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.293135] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Waiting for the task: (returnval){ [ 1314.293135] env[69992]: value = "task-2897807" [ 1314.293135] env[69992]: _type = "Task" [ 1314.293135] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.302591] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Task: {'id': task-2897807, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.379602] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "refresh_cache-45a00234-7ebf-4835-bad3-30474bb27148" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.379746] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired lock "refresh_cache-45a00234-7ebf-4835-bad3-30474bb27148" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1314.379927] env[69992]: DEBUG nova.network.neutron [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1314.423800] env[69992]: DEBUG oslo_concurrency.lockutils [req-68c914ab-073e-4dbb-bf6f-fd11d763bf39 req-1227bc09-dfcd-40e5-a591-6b7893f802c5 service nova] Releasing lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1314.560686] env[69992]: INFO nova.compute.manager [-] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Took 1.38 seconds to deallocate network for instance. 
[ 1314.583997] env[69992]: DEBUG nova.scheduler.client.report [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1314.671035] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52810c16-b0da-1cf7-15f7-e2fbaeff10a3, 'name': SearchDatastore_Task, 'duration_secs': 0.009895} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.671359] env[69992]: DEBUG oslo_concurrency.lockutils [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1314.671598] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1314.671842] env[69992]: DEBUG oslo_concurrency.lockutils [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.672033] env[69992]: DEBUG oslo_concurrency.lockutils [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1314.672230] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1314.672501] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39147db1-c1d7-4e15-8f54-bb65588ccc2e {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.682357] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1314.682551] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1314.683495] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddfd0751-7a66-4c62-9b5a-550bfbb434cd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.689827] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1314.689827] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52964297-c6a4-1656-2739-4433f612cadc" [ 1314.689827] env[69992]: _type = "Task" [ 1314.689827] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.698973] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d66420e8-f2c6-4688-809f-6e6c242b9756 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "refresh_cache-31109fbd-ebc0-422d-a705-7d0e59d4bbb4" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1314.699410] env[69992]: DEBUG nova.objects.instance [None req-d66420e8-f2c6-4688-809f-6e6c242b9756 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lazy-loading 'migration_context' on Instance uuid 31109fbd-ebc0-422d-a705-7d0e59d4bbb4 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1314.700608] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52964297-c6a4-1656-2739-4433f612cadc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.780641] env[69992]: DEBUG nova.compute.manager [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1314.804871] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Task: {'id': task-2897807, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.897223] env[69992]: DEBUG nova.compute.manager [req-596baf1d-951f-44c3-94e2-57d23b8a1c4d req-4a6c59df-a0ae-4feb-a2ea-7987579f9103 service nova] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Received event network-vif-deleted-85e3736f-8bdc-4d92-b8c4-fef12e1a3bf4 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1314.897223] env[69992]: DEBUG nova.compute.manager [req-596baf1d-951f-44c3-94e2-57d23b8a1c4d req-4a6c59df-a0ae-4feb-a2ea-7987579f9103 service nova] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Received event network-vif-plugged-62dc8388-0e1c-4ec8-8f41-8e1feaa83858 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1314.897223] env[69992]: DEBUG oslo_concurrency.lockutils [req-596baf1d-951f-44c3-94e2-57d23b8a1c4d req-4a6c59df-a0ae-4feb-a2ea-7987579f9103 service nova] Acquiring lock "45a00234-7ebf-4835-bad3-30474bb27148-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1314.897223] env[69992]: DEBUG oslo_concurrency.lockutils [req-596baf1d-951f-44c3-94e2-57d23b8a1c4d req-4a6c59df-a0ae-4feb-a2ea-7987579f9103 service nova] Lock "45a00234-7ebf-4835-bad3-30474bb27148-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1314.897223] env[69992]: DEBUG oslo_concurrency.lockutils [req-596baf1d-951f-44c3-94e2-57d23b8a1c4d req-4a6c59df-a0ae-4feb-a2ea-7987579f9103 service nova] Lock "45a00234-7ebf-4835-bad3-30474bb27148-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1314.897223] env[69992]: DEBUG nova.compute.manager [req-596baf1d-951f-44c3-94e2-57d23b8a1c4d req-4a6c59df-a0ae-4feb-a2ea-7987579f9103 service nova] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] No waiting events found dispatching network-vif-plugged-62dc8388-0e1c-4ec8-8f41-8e1feaa83858 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1314.897491] env[69992]: WARNING nova.compute.manager [req-596baf1d-951f-44c3-94e2-57d23b8a1c4d req-4a6c59df-a0ae-4feb-a2ea-7987579f9103 service nova] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Received unexpected event network-vif-plugged-62dc8388-0e1c-4ec8-8f41-8e1feaa83858 for instance with vm_state building and task_state spawning. [ 1314.897628] env[69992]: DEBUG nova.compute.manager [req-596baf1d-951f-44c3-94e2-57d23b8a1c4d req-4a6c59df-a0ae-4feb-a2ea-7987579f9103 service nova] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Received event network-changed-62dc8388-0e1c-4ec8-8f41-8e1feaa83858 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1314.897880] env[69992]: DEBUG nova.compute.manager [req-596baf1d-951f-44c3-94e2-57d23b8a1c4d req-4a6c59df-a0ae-4feb-a2ea-7987579f9103 service nova] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Refreshing instance network info cache due to event network-changed-62dc8388-0e1c-4ec8-8f41-8e1feaa83858. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1314.898131] env[69992]: DEBUG oslo_concurrency.lockutils [req-596baf1d-951f-44c3-94e2-57d23b8a1c4d req-4a6c59df-a0ae-4feb-a2ea-7987579f9103 service nova] Acquiring lock "refresh_cache-45a00234-7ebf-4835-bad3-30474bb27148" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.926046] env[69992]: DEBUG nova.network.neutron [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1315.048267] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b91209a-7708-4d06-843f-d555e0d51735 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Volume attach. Driver type: vmdk {{(pid=69992) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1315.048267] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b91209a-7708-4d06-843f-d555e0d51735 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582083', 'volume_id': '5fab4ca8-04ab-4575-95a1-e55e5a73415d', 'name': 'volume-5fab4ca8-04ab-4575-95a1-e55e5a73415d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1f9f3bdf-c806-4ac9-85f3-6b33b983fafe', 'attached_at': '', 'detached_at': '', 'volume_id': '5fab4ca8-04ab-4575-95a1-e55e5a73415d', 'serial': '5fab4ca8-04ab-4575-95a1-e55e5a73415d'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1315.048775] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08012a15-9b18-4e4e-bade-4d089f9dc3bb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.066798] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1315.067805] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dcb9786-a166-4bc5-8c00-7b1ecb06b125 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.093647] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b91209a-7708-4d06-843f-d555e0d51735 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] volume-5fab4ca8-04ab-4575-95a1-e55e5a73415d/volume-5fab4ca8-04ab-4575-95a1-e55e5a73415d.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1315.094506] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.908s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1315.096598] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cdff9291-a0b9-4471-8df5-b9957974d68b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.115467] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.364s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1315.115467] env[69992]: DEBUG nova.objects.instance [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Lazy-loading 'resources' on Instance uuid ae681491-c03e-486f-b763-0ebfa4dcd669 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1315.121208] env[69992]: DEBUG oslo_vmware.api [None req-5b91209a-7708-4d06-843f-d555e0d51735 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1315.121208] env[69992]: value = "task-2897808" [ 1315.121208] env[69992]: _type = "Task" [ 1315.121208] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.133811] env[69992]: DEBUG oslo_vmware.api [None req-5b91209a-7708-4d06-843f-d555e0d51735 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897808, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.134914] env[69992]: INFO nova.scheduler.client.report [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Deleted allocations for instance a8813822-f77b-4b73-a6dc-e0eab83b0402 [ 1315.203640] env[69992]: DEBUG nova.objects.base [None req-d66420e8-f2c6-4688-809f-6e6c242b9756 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Object Instance<31109fbd-ebc0-422d-a705-7d0e59d4bbb4> lazy-loaded attributes: info_cache,migration_context {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1315.204016] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52964297-c6a4-1656-2739-4433f612cadc, 'name': SearchDatastore_Task, 'duration_secs': 0.01215} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.207170] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c003a901-2b40-4655-9eb6-09d4df0afacb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.211336] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb04b848-b795-41f2-b0ad-4647559cb27e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.217694] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1315.217694] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]529b548c-b347-8236-617b-bf01039b98c5" [ 1315.217694] env[69992]: _type = "Task" [ 1315.217694] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.237681] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20c581ae-2bf0-4820-a8f8-34c550fe5afe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.248963] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]529b548c-b347-8236-617b-bf01039b98c5, 'name': SearchDatastore_Task, 'duration_secs': 0.011098} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.252462] env[69992]: DEBUG oslo_concurrency.lockutils [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1315.252462] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 6ccc70f5-4857-4af3-99a1-f60ec35aebaf/6ccc70f5-4857-4af3-99a1-f60ec35aebaf.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1315.252462] env[69992]: DEBUG oslo_vmware.api [None req-d66420e8-f2c6-4688-809f-6e6c242b9756 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1315.252462] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52330a31-f2b4-90e2-3897-6acc789aa20d" [ 1315.252462] env[69992]: _type = "Task" [ 1315.252462] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.252883] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a20a698-7521-4e44-bf28-bf96cfafea17 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.267613] env[69992]: DEBUG oslo_vmware.api [None req-d66420e8-f2c6-4688-809f-6e6c242b9756 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52330a31-f2b4-90e2-3897-6acc789aa20d, 'name': SearchDatastore_Task, 'duration_secs': 0.006364} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.269658] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d66420e8-f2c6-4688-809f-6e6c242b9756 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1315.270211] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1315.270211] env[69992]: value = "task-2897809" [ 1315.270211] env[69992]: _type = "Task" [ 1315.270211] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.281922] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897809, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.305063] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Task: {'id': task-2897807, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.306354] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1315.320241] env[69992]: DEBUG nova.network.neutron [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Updating instance_info_cache with network_info: [{"id": "62dc8388-0e1c-4ec8-8f41-8e1feaa83858", "address": "fa:16:3e:b0:c2:2f", "network": {"id": "ef950ccf-7246-4fba-b1d2-5829e51d7f7d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093076994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b8716c4b7324052a3472734c655655a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62dc8388-0e", "ovs_interfaceid": "62dc8388-0e1c-4ec8-8f41-8e1feaa83858", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1315.634638] env[69992]: DEBUG oslo_vmware.api [None req-5b91209a-7708-4d06-843f-d555e0d51735 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897808, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.654403] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c2e417d3-ee76-4ad0-be53-5afbdc757db8 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "a8813822-f77b-4b73-a6dc-e0eab83b0402" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.785s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1315.782603] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897809, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.807476] env[69992]: DEBUG oslo_vmware.api [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Task: {'id': task-2897807, 'name': PowerOnVM_Task, 'duration_secs': 1.10286} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.807672] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1315.807867] env[69992]: INFO nova.compute.manager [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Took 7.44 seconds to spawn the instance on the hypervisor. [ 1315.808481] env[69992]: DEBUG nova.compute.manager [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1315.809561] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0611a1c8-2306-4804-aa60-48c85b0d567e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.822896] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Releasing lock "refresh_cache-45a00234-7ebf-4835-bad3-30474bb27148" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1315.823981] env[69992]: DEBUG nova.compute.manager [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Instance network_info: |[{"id": "62dc8388-0e1c-4ec8-8f41-8e1feaa83858", "address": "fa:16:3e:b0:c2:2f", "network": {"id": "ef950ccf-7246-4fba-b1d2-5829e51d7f7d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093076994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b8716c4b7324052a3472734c655655a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62dc8388-0e", "ovs_interfaceid": "62dc8388-0e1c-4ec8-8f41-8e1feaa83858", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1315.823981] env[69992]: DEBUG oslo_concurrency.lockutils [req-596baf1d-951f-44c3-94e2-57d23b8a1c4d req-4a6c59df-a0ae-4feb-a2ea-7987579f9103 service nova] Acquired lock "refresh_cache-45a00234-7ebf-4835-bad3-30474bb27148" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1315.823981] env[69992]: DEBUG nova.network.neutron [req-596baf1d-951f-44c3-94e2-57d23b8a1c4d req-4a6c59df-a0ae-4feb-a2ea-7987579f9103 service nova] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Refreshing network info cache for port 62dc8388-0e1c-4ec8-8f41-8e1feaa83858 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1315.829019] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:c2:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ed4797-90ad-44cd-bbcb-e90b2a8400f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '62dc8388-0e1c-4ec8-8f41-8e1feaa83858', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1315.833425] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1315.837104] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1315.837556] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-beaddb9a-870b-4f7c-aac1-d339078fc2af {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.865972] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1315.865972] env[69992]: value = "task-2897810" [ 1315.865972] env[69992]: _type = "Task" [ 1315.865972] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.875685] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897810, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.999972] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b83eea21-93ec-4f3c-9902-d29cb92b54fe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.009226] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a688a8c2-622d-49e0-9a05-8e9f68b69b52 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.044510] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84858b8e-9eee-4a2e-a04d-445eb334616c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.053014] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b489aea7-a110-4313-ae60-65af02d24fa5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.066744] env[69992]: DEBUG nova.compute.provider_tree [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1316.132937] env[69992]: DEBUG oslo_vmware.api [None req-5b91209a-7708-4d06-843f-d555e0d51735 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897808, 'name': ReconfigVM_Task, 'duration_secs': 0.596596} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.133342] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b91209a-7708-4d06-843f-d555e0d51735 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Reconfigured VM instance instance-00000055 to attach disk [datastore1] volume-5fab4ca8-04ab-4575-95a1-e55e5a73415d/volume-5fab4ca8-04ab-4575-95a1-e55e5a73415d.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1316.139141] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3ba5c34-b0b6-4234-b1fb-47afbbf4d105 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.155903] env[69992]: DEBUG oslo_vmware.api [None req-5b91209a-7708-4d06-843f-d555e0d51735 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1316.155903] env[69992]: value = "task-2897811" [ 1316.155903] env[69992]: _type = "Task" [ 1316.155903] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.166681] env[69992]: DEBUG oslo_vmware.api [None req-5b91209a-7708-4d06-843f-d555e0d51735 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897811, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.280816] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897809, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.333742] env[69992]: INFO nova.compute.manager [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Took 34.80 seconds to build instance. [ 1316.382109] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897810, 'name': CreateVM_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.571145] env[69992]: DEBUG nova.scheduler.client.report [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1316.619914] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "eec50935-f553-43c7-b67b-7289299745bd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1316.620193] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "eec50935-f553-43c7-b67b-7289299745bd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1316.620401] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "eec50935-f553-43c7-b67b-7289299745bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1316.620581] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "eec50935-f553-43c7-b67b-7289299745bd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1316.620759] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "eec50935-f553-43c7-b67b-7289299745bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1316.622948] env[69992]: INFO nova.compute.manager [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Terminating instance [ 1316.665820] env[69992]: DEBUG oslo_vmware.api [None req-5b91209a-7708-4d06-843f-d555e0d51735 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897811, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.703498] env[69992]: DEBUG nova.network.neutron [req-596baf1d-951f-44c3-94e2-57d23b8a1c4d req-4a6c59df-a0ae-4feb-a2ea-7987579f9103 service nova] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Updated VIF entry in instance network info cache for port 62dc8388-0e1c-4ec8-8f41-8e1feaa83858. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1316.703856] env[69992]: DEBUG nova.network.neutron [req-596baf1d-951f-44c3-94e2-57d23b8a1c4d req-4a6c59df-a0ae-4feb-a2ea-7987579f9103 service nova] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Updating instance_info_cache with network_info: [{"id": "62dc8388-0e1c-4ec8-8f41-8e1feaa83858", "address": "fa:16:3e:b0:c2:2f", "network": {"id": "ef950ccf-7246-4fba-b1d2-5829e51d7f7d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093076994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b8716c4b7324052a3472734c655655a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62dc8388-0e", "ovs_interfaceid": "62dc8388-0e1c-4ec8-8f41-8e1feaa83858", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1316.781369] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897809, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.839232] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e9730f62-3108-4e2a-a8fd-6e101572ce4a tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Lock "1cf5a6d2-8ec9-429a-9c31-eb3c699389d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.315s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1316.880515] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897810, 'name': CreateVM_Task, 'duration_secs': 0.690919} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.880727] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1316.881660] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1316.881892] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1316.882314] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1316.882632] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-406d941c-aad1-4df9-b90e-b9bdcb26d556 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.887986] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1316.887986] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528f7169-fb6d-5274-1cf3-905331ab6b22" [ 1316.887986] env[69992]: _type = "Task" [ 1316.887986] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.897758] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528f7169-fb6d-5274-1cf3-905331ab6b22, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.077768] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.964s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1317.080209] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.504s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1317.081874] env[69992]: INFO nova.compute.claims [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1317.106021] env[69992]: INFO nova.scheduler.client.report [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Deleted allocations for instance ae681491-c03e-486f-b763-0ebfa4dcd669 [ 1317.126936] env[69992]: DEBUG nova.compute.manager [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1317.127313] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1317.128506] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c3f9dee-2e32-4ea6-a91c-7593d6d995ad {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.136978] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1317.137301] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1279abf7-7851-42d9-b506-710c3f5a0b25 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.143476] env[69992]: DEBUG oslo_vmware.api [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1317.143476] env[69992]: value = "task-2897812" [ 1317.143476] env[69992]: _type = "Task" [ 1317.143476] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.152832] env[69992]: DEBUG oslo_vmware.api [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897812, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.167351] env[69992]: DEBUG oslo_vmware.api [None req-5b91209a-7708-4d06-843f-d555e0d51735 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897811, 'name': ReconfigVM_Task, 'duration_secs': 1.005981} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.167599] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b91209a-7708-4d06-843f-d555e0d51735 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582083', 'volume_id': '5fab4ca8-04ab-4575-95a1-e55e5a73415d', 'name': 'volume-5fab4ca8-04ab-4575-95a1-e55e5a73415d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1f9f3bdf-c806-4ac9-85f3-6b33b983fafe', 'attached_at': '', 'detached_at': '', 'volume_id': '5fab4ca8-04ab-4575-95a1-e55e5a73415d', 'serial': '5fab4ca8-04ab-4575-95a1-e55e5a73415d'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1317.207226] env[69992]: DEBUG oslo_concurrency.lockutils [req-596baf1d-951f-44c3-94e2-57d23b8a1c4d req-4a6c59df-a0ae-4feb-a2ea-7987579f9103 service nova] Releasing lock "refresh_cache-45a00234-7ebf-4835-bad3-30474bb27148" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1317.228426] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8455ea08-0ff7-4a46-bdb6-f6f721b23ad9 tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Acquiring lock "interface-1cf5a6d2-8ec9-429a-9c31-eb3c699389d9-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1317.228701] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8455ea08-0ff7-4a46-bdb6-f6f721b23ad9 tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Lock "interface-1cf5a6d2-8ec9-429a-9c31-eb3c699389d9-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1317.229143] env[69992]: DEBUG nova.objects.instance [None req-8455ea08-0ff7-4a46-bdb6-f6f721b23ad9 tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Lazy-loading 'flavor' on Instance uuid 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1317.283023] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897809, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.647117} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.283738] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 6ccc70f5-4857-4af3-99a1-f60ec35aebaf/6ccc70f5-4857-4af3-99a1-f60ec35aebaf.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1317.283970] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1317.284279] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a9122efd-37af-4a69-a596-d06ff6f32f82 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.290332] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1317.290332] env[69992]: value = "task-2897813" [ 1317.290332] env[69992]: _type = "Task" [ 1317.290332] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.297339] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897813, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.399196] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528f7169-fb6d-5274-1cf3-905331ab6b22, 'name': SearchDatastore_Task, 'duration_secs': 0.030732} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.399525] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1317.399758] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1317.400009] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1317.400164] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1317.400400] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1317.400695] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-245b76a4-abb9-4780-b5f0-0633ed235618 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.411939] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1317.412275] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1317.412882] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-590aaad1-360d-44ec-be43-5967bb3935fa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.417936] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1317.417936] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52cb3046-91c2-a0e9-2d4e-ba110ce98bf3" [ 1317.417936] env[69992]: _type = "Task" [ 1317.417936] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.425810] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52cb3046-91c2-a0e9-2d4e-ba110ce98bf3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.615751] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0d14aa9-ef40-4bdd-96b5-26b83059cfc7 tempest-ServerTagsTestJSON-1940887869 tempest-ServerTagsTestJSON-1940887869-project-member] Lock "ae681491-c03e-486f-b763-0ebfa4dcd669" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.234s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1317.654085] env[69992]: DEBUG oslo_vmware.api [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897812, 'name': PowerOffVM_Task, 'duration_secs': 0.23225} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.654365] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1317.654536] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1317.654781] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9e1fc48d-4651-40d3-a013-a506c66c28ae {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.726297] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1317.726539] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1317.726723] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Deleting the datastore file [datastore2] eec50935-f553-43c7-b67b-7289299745bd {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1317.726994] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7179b89-01a2-4f7c-89d0-efc1ce909d7b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.733065] env[69992]: DEBUG nova.objects.instance [None req-8455ea08-0ff7-4a46-bdb6-f6f721b23ad9 tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Lazy-loading 'pci_requests' on Instance uuid 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1317.744037] env[69992]: DEBUG oslo_vmware.api [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for the task: (returnval){ [ 1317.744037] env[69992]: value = "task-2897815" [ 1317.744037] env[69992]: _type = "Task" [ 1317.744037] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.757733] env[69992]: DEBUG oslo_vmware.api [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897815, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.801474] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897813, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071475} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.801763] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1317.802862] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d62a7bc-a1d8-4035-8c27-373e26073a95 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.827945] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 6ccc70f5-4857-4af3-99a1-f60ec35aebaf/6ccc70f5-4857-4af3-99a1-f60ec35aebaf.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1317.828623] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6fdbff38-64d7-4dd4-9314-0b346b13dedd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.852567] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1317.852567] env[69992]: value = "task-2897816" [ 1317.852567] env[69992]: _type = "Task" [ 1317.852567] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.861986] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897816, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.934220] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52cb3046-91c2-a0e9-2d4e-ba110ce98bf3, 'name': SearchDatastore_Task, 'duration_secs': 0.019545} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.935247] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a9df4bb-f012-48c1-8c3d-081a9c7758ea {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.941557] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1317.941557] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]527862e0-2322-d8b6-723b-b3ef6b1b7d6b" [ 1317.941557] env[69992]: _type = "Task" [ 1317.941557] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.950268] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]527862e0-2322-d8b6-723b-b3ef6b1b7d6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.218574] env[69992]: DEBUG nova.objects.instance [None req-5b91209a-7708-4d06-843f-d555e0d51735 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lazy-loading 'flavor' on Instance uuid 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1318.245098] env[69992]: DEBUG nova.objects.base [None req-8455ea08-0ff7-4a46-bdb6-f6f721b23ad9 tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Object Instance<1cf5a6d2-8ec9-429a-9c31-eb3c699389d9> lazy-loaded attributes: flavor,pci_requests {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1318.245343] env[69992]: DEBUG nova.network.neutron [None req-8455ea08-0ff7-4a46-bdb6-f6f721b23ad9 tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1318.260371] env[69992]: DEBUG oslo_vmware.api [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Task: {'id': task-2897815, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.264302} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.260634] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1318.260800] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1318.261029] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1318.261958] env[69992]: INFO nova.compute.manager [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] [instance: eec50935-f553-43c7-b67b-7289299745bd] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1318.261958] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1318.261958] env[69992]: DEBUG nova.compute.manager [-] [instance: eec50935-f553-43c7-b67b-7289299745bd] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1318.261958] env[69992]: DEBUG nova.network.neutron [-] [instance: eec50935-f553-43c7-b67b-7289299745bd] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1318.361334] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8455ea08-0ff7-4a46-bdb6-f6f721b23ad9 tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Lock "interface-1cf5a6d2-8ec9-429a-9c31-eb3c699389d9-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.132s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1318.371097] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897816, 'name': ReconfigVM_Task, 'duration_secs': 0.319566} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.375923] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 6ccc70f5-4857-4af3-99a1-f60ec35aebaf/6ccc70f5-4857-4af3-99a1-f60ec35aebaf.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1318.378047] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-21efa9e0-0fc7-4779-97d0-54ca3983d992 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.386948] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1318.386948] env[69992]: value = "task-2897817" [ 1318.386948] env[69992]: _type = "Task" [ 1318.386948] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.409676] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897817, 'name': Rename_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.416338] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98fdd8d-853a-409d-a7a8-49abaf797222 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.426256] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f4da945-a790-4e4f-ac65-2e7c43ec1292 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.465252] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf14a52-e613-474d-ab7b-bbdbd771e448 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.473669] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]527862e0-2322-d8b6-723b-b3ef6b1b7d6b, 'name': SearchDatastore_Task, 'duration_secs': 0.010001} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.476206] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1318.476499] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 45a00234-7ebf-4835-bad3-30474bb27148/45a00234-7ebf-4835-bad3-30474bb27148.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1318.476835] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad8a2eec-957f-4ce0-b53b-a50315f4d56d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.479905] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39003ddb-1ef0-4301-8c8c-dfb4248900dd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.496677] env[69992]: DEBUG nova.compute.provider_tree [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1318.499234] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1318.499234] env[69992]: value = "task-2897818" [ 1318.499234] env[69992]: _type = "Task" [ 1318.499234] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.506341] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897818, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.723269] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5b91209a-7708-4d06-843f-d555e0d51735 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.823s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1318.901741] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897817, 'name': Rename_Task, 'duration_secs': 0.190373} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.902089] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1318.902349] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6ede6a4b-1de0-47dc-bc59-da615d03e4dc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.909284] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1318.909284] env[69992]: value = "task-2897819" [ 1318.909284] env[69992]: _type = "Task" [ 1318.909284] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.916891] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897819, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.939608] env[69992]: DEBUG nova.compute.manager [req-5108bd4d-ec61-40a6-931d-0622eccf6748 req-92418e61-495f-479e-bf67-e2e8b6101f88 service nova] [instance: eec50935-f553-43c7-b67b-7289299745bd] Received event network-vif-deleted-ae0113e0-6fd4-44a9-b496-7e09ffb4539b {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1318.939784] env[69992]: INFO nova.compute.manager [req-5108bd4d-ec61-40a6-931d-0622eccf6748 req-92418e61-495f-479e-bf67-e2e8b6101f88 service nova] [instance: eec50935-f553-43c7-b67b-7289299745bd] Neutron deleted interface ae0113e0-6fd4-44a9-b496-7e09ffb4539b; detaching it from the instance and deleting it from the info cache [ 1318.939954] env[69992]: DEBUG nova.network.neutron [req-5108bd4d-ec61-40a6-931d-0622eccf6748 req-92418e61-495f-479e-bf67-e2e8b6101f88 service nova] [instance: eec50935-f553-43c7-b67b-7289299745bd] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1318.999926] env[69992]: DEBUG nova.scheduler.client.report [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1319.013053] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897818, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.239973] env[69992]: DEBUG nova.compute.manager [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Stashing vm_state: active {{(pid=69992) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1319.346491] env[69992]: DEBUG nova.network.neutron [-] [instance: eec50935-f553-43c7-b67b-7289299745bd] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1319.423952] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897819, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.442870] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1cc9de58-5fb6-4f13-a156-ed1beccac2ec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.452821] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3278e69c-5df3-49bb-a865-14ba00160e38 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.493520] env[69992]: DEBUG nova.compute.manager [req-5108bd4d-ec61-40a6-931d-0622eccf6748 req-92418e61-495f-479e-bf67-e2e8b6101f88 service nova] [instance: eec50935-f553-43c7-b67b-7289299745bd] Detach interface failed, port_id=ae0113e0-6fd4-44a9-b496-7e09ffb4539b, reason: Instance eec50935-f553-43c7-b67b-7289299745bd could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1319.510279] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.429s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1319.510959] env[69992]: DEBUG nova.compute.manager [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1319.518943] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.346s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1319.520942] env[69992]: INFO nova.compute.claims [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1319.533020] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897818, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.802482} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.533020] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 45a00234-7ebf-4835-bad3-30474bb27148/45a00234-7ebf-4835-bad3-30474bb27148.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1319.533020] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1319.533020] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2d8e9ef8-5505-49b4-a461-a292d86b511d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.540087] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1319.540087] env[69992]: value = "task-2897820" [ 1319.540087] env[69992]: _type = "Task" [ 1319.540087] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.549761] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897820, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.765695] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1319.851299] env[69992]: INFO nova.compute.manager [-] [instance: eec50935-f553-43c7-b67b-7289299745bd] Took 1.59 seconds to deallocate network for instance. [ 1319.927880] env[69992]: DEBUG oslo_vmware.api [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897819, 'name': PowerOnVM_Task, 'duration_secs': 0.732538} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.927880] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1319.928546] env[69992]: INFO nova.compute.manager [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Took 9.15 seconds to spawn the instance on the hypervisor. [ 1319.928779] env[69992]: DEBUG nova.compute.manager [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1319.929903] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e185c251-3601-4130-852f-afe9f93c0c77 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.024018] env[69992]: DEBUG nova.compute.utils [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1320.024018] env[69992]: DEBUG nova.compute.manager [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1320.024018] env[69992]: DEBUG nova.network.neutron [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1320.057895] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897820, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086154} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.057895] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1320.057895] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8bfeb6e-7222-40c5-8efa-6a1180071bd2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.083141] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] 45a00234-7ebf-4835-bad3-30474bb27148/45a00234-7ebf-4835-bad3-30474bb27148.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1320.085292] env[69992]: DEBUG nova.policy [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8380e6e9bb87424793504916dbc01790', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '34923aa9da1d46cc9d22d569d9428781', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1320.087318] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f86f0fa8-409e-4556-93cd-13dda7fb252d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.117271] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1320.117271] env[69992]: value = "task-2897821" [ 1320.117271] env[69992]: _type = "Task" [ 1320.117271] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.130970] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897821, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.360221] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1320.395275] env[69992]: DEBUG oslo_concurrency.lockutils [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Acquiring lock "1cf5a6d2-8ec9-429a-9c31-eb3c699389d9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1320.395275] env[69992]: DEBUG oslo_concurrency.lockutils [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Lock "1cf5a6d2-8ec9-429a-9c31-eb3c699389d9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1320.395275] env[69992]: DEBUG oslo_concurrency.lockutils [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Acquiring lock "1cf5a6d2-8ec9-429a-9c31-eb3c699389d9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1320.395275] env[69992]: DEBUG oslo_concurrency.lockutils [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Lock "1cf5a6d2-8ec9-429a-9c31-eb3c699389d9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1320.395275] env[69992]: DEBUG oslo_concurrency.lockutils [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Lock "1cf5a6d2-8ec9-429a-9c31-eb3c699389d9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1320.396969] env[69992]: INFO nova.compute.manager [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Terminating instance [ 1320.449716] env[69992]: DEBUG nova.network.neutron [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Successfully created port: a4e3b7ab-de40-43e1-b9e2-222d0126cf7a {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1320.457083] 
env[69992]: INFO nova.compute.manager [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Took 34.87 seconds to build instance. [ 1320.528727] env[69992]: DEBUG nova.compute.manager [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1320.633079] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897821, 'name': ReconfigVM_Task, 'duration_secs': 0.302205} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.636560] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Reconfigured VM instance instance-0000005f to attach disk [datastore2] 45a00234-7ebf-4835-bad3-30474bb27148/45a00234-7ebf-4835-bad3-30474bb27148.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1320.637816] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7f181f92-0a7d-4e9a-a336-2bac63d13453 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.648416] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1320.648416] env[69992]: value = "task-2897822" [ 1320.648416] env[69992]: _type = "Task" [ 1320.648416] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.660140] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897822, 'name': Rename_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.894551] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c0e198-222f-49ef-9d3c-45afd0ac9fdc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.902310] env[69992]: DEBUG nova.compute.manager [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1320.902310] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1320.906018] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e639814-9b27-410c-bfb5-d838398392dc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.906386] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-705d8176-9949-43e3-bce4-fed7a42156fd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.913539] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1320.937926] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e9298875-84b5-4d65-8077-5593eb318bfb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.941108] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf88f938-d075-450d-bf2f-c8bac8dedabd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.949920] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b68bed6-bb61-438f-afb8-60f2e43e6e67 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.954855] env[69992]: DEBUG oslo_vmware.api [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Waiting for the task: (returnval){ [ 1320.954855] env[69992]: value = "task-2897823" [ 1320.954855] env[69992]: _type = "Task" [ 1320.954855] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.968147] env[69992]: DEBUG oslo_concurrency.lockutils [None req-84bc839d-b26b-449d-8d29-b602ac9d7dd6 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "6ccc70f5-4857-4af3-99a1-f60ec35aebaf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.394s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1320.968611] env[69992]: DEBUG nova.compute.provider_tree [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1320.974569] env[69992]: DEBUG oslo_vmware.api [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Task: {'id': task-2897823, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.162023] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897822, 'name': Rename_Task, 'duration_secs': 0.152763} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.162023] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1321.162023] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c986c4a9-635c-4274-a520-38b9bee91c57 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.166945] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1321.166945] env[69992]: value = "task-2897824" [ 1321.166945] env[69992]: _type = "Task" [ 1321.166945] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.175772] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897824, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.465571] env[69992]: DEBUG oslo_vmware.api [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Task: {'id': task-2897823, 'name': PowerOffVM_Task, 'duration_secs': 0.211984} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.465872] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1321.466504] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1321.466504] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d05e426-2720-476b-b322-1cdfae48a5d4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.472112] env[69992]: DEBUG nova.scheduler.client.report [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1321.539893] env[69992]: DEBUG nova.compute.manager [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1321.542988] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1321.543241] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1321.543440] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Deleting the datastore file [datastore1] 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1321.543919] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1a731fd1-4aa6-4ae9-aa7a-4f77b5179978 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.551772] env[69992]: DEBUG oslo_vmware.api [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Waiting for the task: (returnval){ [ 1321.551772] env[69992]: value = "task-2897826" [ 1321.551772] env[69992]: _type = "Task" [ 1321.551772] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.554237] env[69992]: DEBUG nova.compute.manager [req-e2476c94-1079-4998-beda-92186769d22b req-59130c28-2a76-48db-96f6-09d69aab8ee6 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Received event network-changed-1b550e88-755a-45a1-98fd-6fcb8fa4a7a8 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1321.554503] env[69992]: DEBUG nova.compute.manager [req-e2476c94-1079-4998-beda-92186769d22b req-59130c28-2a76-48db-96f6-09d69aab8ee6 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Refreshing instance network info cache due to event network-changed-1b550e88-755a-45a1-98fd-6fcb8fa4a7a8. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1321.554725] env[69992]: DEBUG oslo_concurrency.lockutils [req-e2476c94-1079-4998-beda-92186769d22b req-59130c28-2a76-48db-96f6-09d69aab8ee6 service nova] Acquiring lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1321.555244] env[69992]: DEBUG oslo_concurrency.lockutils [req-e2476c94-1079-4998-beda-92186769d22b req-59130c28-2a76-48db-96f6-09d69aab8ee6 service nova] Acquired lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1321.555244] env[69992]: DEBUG nova.network.neutron [req-e2476c94-1079-4998-beda-92186769d22b req-59130c28-2a76-48db-96f6-09d69aab8ee6 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Refreshing network info cache for port 1b550e88-755a-45a1-98fd-6fcb8fa4a7a8 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1321.566842] env[69992]: DEBUG oslo_vmware.api [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Task: {'id': task-2897826, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.577928] env[69992]: DEBUG nova.virt.hardware [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1321.578110] env[69992]: DEBUG nova.virt.hardware [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1321.578180] env[69992]: DEBUG nova.virt.hardware [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1321.578353] env[69992]: DEBUG nova.virt.hardware [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1321.578497] env[69992]: DEBUG nova.virt.hardware 
[None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1321.578641] env[69992]: DEBUG nova.virt.hardware [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1321.578884] env[69992]: DEBUG nova.virt.hardware [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1321.579045] env[69992]: DEBUG nova.virt.hardware [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1321.579257] env[69992]: DEBUG nova.virt.hardware [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1321.579413] env[69992]: DEBUG nova.virt.hardware [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1321.579555] env[69992]: DEBUG nova.virt.hardware [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1321.581859] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e131f4-78a8-457b-8299-f425a733b230 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.590142] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28769447-577e-4f86-809d-78778ba91d07 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.678282] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897824, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.710037] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4d9f0595-7250-4163-ad26-e93a341f8c44 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "e95e47c2-d82e-4153-8d16-7b65d992e91a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1321.710147] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4d9f0595-7250-4163-ad26-e93a341f8c44 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "e95e47c2-d82e-4153-8d16-7b65d992e91a" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1321.962858] env[69992]: DEBUG oslo_concurrency.lockutils [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "3f44442d-82b1-4669-8d65-0088d4a9babb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1321.964046] env[69992]: DEBUG oslo_concurrency.lockutils [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "3f44442d-82b1-4669-8d65-0088d4a9babb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1321.981027] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.459s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1321.981027] env[69992]: DEBUG nova.compute.manager [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1321.982059] env[69992]: DEBUG oslo_concurrency.lockutils [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.806s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1321.983725] env[69992]: INFO nova.compute.claims [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1322.023592] env[69992]: DEBUG nova.network.neutron [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Successfully updated port: a4e3b7ab-de40-43e1-b9e2-222d0126cf7a {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1322.065440] env[69992]: DEBUG oslo_vmware.api [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Task: {'id': task-2897826, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185254} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.065701] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1322.065888] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1322.066186] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1322.066416] env[69992]: INFO nova.compute.manager [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1322.066668] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1322.066887] env[69992]: DEBUG nova.compute.manager [-] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1322.067664] env[69992]: DEBUG nova.network.neutron [-] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1322.183732] env[69992]: DEBUG oslo_vmware.api [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897824, 'name': PowerOnVM_Task, 'duration_secs': 0.58946} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.184166] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1322.184476] env[69992]: INFO nova.compute.manager [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Took 8.90 seconds to spawn the instance on the hypervisor. [ 1322.184759] env[69992]: DEBUG nova.compute.manager [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1322.186093] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad086239-d3c1-4b71-afe1-5e3d68459303 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.213624] env[69992]: DEBUG nova.compute.utils [None req-4d9f0595-7250-4163-ad26-e93a341f8c44 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1322.467021] env[69992]: DEBUG nova.compute.manager [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1322.491402] env[69992]: DEBUG nova.compute.utils [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1322.497452] env[69992]: DEBUG nova.compute.manager [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1322.497625] env[69992]: DEBUG nova.network.neutron [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1322.525985] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquiring lock "refresh_cache-48558980-2800-4f5b-80ce-d59552445c3f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1322.526369] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquired lock "refresh_cache-48558980-2800-4f5b-80ce-d59552445c3f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1322.526554] env[69992]: DEBUG nova.network.neutron [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1322.532177] env[69992]: DEBUG nova.compute.manager [req-ed16223d-5e54-4a48-ac73-d195588bd0a6 req-991d88c5-757e-400f-906a-9ae0b8d0df08 service nova] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Received event network-vif-deleted-b6a0e7a4-6bd1-44df-af1c-9f2fd60387ca {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1322.532177] env[69992]: INFO nova.compute.manager [req-ed16223d-5e54-4a48-ac73-d195588bd0a6 req-991d88c5-757e-400f-906a-9ae0b8d0df08 service nova] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Neutron deleted interface b6a0e7a4-6bd1-44df-af1c-9f2fd60387ca; detaching it from the instance and deleting it from the info cache [ 1322.532177] env[69992]: DEBUG nova.network.neutron [req-ed16223d-5e54-4a48-ac73-d195588bd0a6 req-991d88c5-757e-400f-906a-9ae0b8d0df08 service nova] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1322.553895] env[69992]: DEBUG nova.policy [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cdc7f71c9c4b4d40bf40b631c24b5ee6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '17ab89c6cf054418a4dd1a0e61b3a5e8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1322.564675] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 
tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "fe3624b0-7d4a-4a16-83e3-3f28c2a74006" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1322.564675] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "fe3624b0-7d4a-4a16-83e3-3f28c2a74006" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1322.595626] env[69992]: DEBUG nova.network.neutron [req-e2476c94-1079-4998-beda-92186769d22b req-59130c28-2a76-48db-96f6-09d69aab8ee6 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Updated VIF entry in instance network info cache for port 1b550e88-755a-45a1-98fd-6fcb8fa4a7a8. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1322.595686] env[69992]: DEBUG nova.network.neutron [req-e2476c94-1079-4998-beda-92186769d22b req-59130c28-2a76-48db-96f6-09d69aab8ee6 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Updating instance_info_cache with network_info: [{"id": "1b550e88-755a-45a1-98fd-6fcb8fa4a7a8", "address": "fa:16:3e:14:a9:b3", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b550e88-75", "ovs_interfaceid": "1b550e88-755a-45a1-98fd-6fcb8fa4a7a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1322.707594] env[69992]: INFO nova.compute.manager [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Took 32.64 seconds to build instance. 
[ 1322.717292] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4d9f0595-7250-4163-ad26-e93a341f8c44 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "e95e47c2-d82e-4153-8d16-7b65d992e91a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1322.900753] env[69992]: DEBUG nova.network.neutron [-] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1322.934701] env[69992]: DEBUG nova.network.neutron [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Successfully created port: 1f44518f-713e-4671-bc22-96c67ac28c8e {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1322.996552] env[69992]: DEBUG oslo_concurrency.lockutils [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1322.997126] env[69992]: DEBUG nova.compute.manager [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1323.033734] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e8149822-ac8f-4eef-af90-26c7018a37a1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.047646] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63eebe34-9df3-41f2-bfc9-d106c54101b1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.064792] env[69992]: DEBUG nova.network.neutron [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1323.067542] env[69992]: DEBUG nova.compute.manager [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1323.090904] env[69992]: DEBUG nova.compute.manager [req-ed16223d-5e54-4a48-ac73-d195588bd0a6 req-991d88c5-757e-400f-906a-9ae0b8d0df08 service nova] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Detach interface failed, port_id=b6a0e7a4-6bd1-44df-af1c-9f2fd60387ca, reason: Instance 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9 could not be found. 
{{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1323.100022] env[69992]: DEBUG oslo_concurrency.lockutils [req-e2476c94-1079-4998-beda-92186769d22b req-59130c28-2a76-48db-96f6-09d69aab8ee6 service nova] Releasing lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1323.209997] env[69992]: DEBUG oslo_concurrency.lockutils [None req-bff94c7e-2f30-49dc-bff1-cfc7f292cb82 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "45a00234-7ebf-4835-bad3-30474bb27148" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.148s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1323.215021] env[69992]: DEBUG nova.network.neutron [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Updating instance_info_cache with network_info: [{"id": "a4e3b7ab-de40-43e1-b9e2-222d0126cf7a", "address": "fa:16:3e:ed:92:5b", "network": {"id": "d02e51d7-72be-4f6c-aa90-0ada60f8d7f7", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1603378574-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34923aa9da1d46cc9d22d569d9428781", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4e3b7ab-de", "ovs_interfaceid": "a4e3b7ab-de40-43e1-b9e2-222d0126cf7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1323.354640] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c417fa6-e255-46f8-a370-15099280d181 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.362656] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b93ac0bd-e446-4406-9ad7-2f823a718b8a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.394849] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c04b446-6d59-468d-98b9-5afcf7c1af41 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.402284] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c0aa8eb-ba20-4296-a668-f6bd9f481676 {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.407042] env[69992]: INFO nova.compute.manager [-] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Took 1.34 seconds to deallocate network for instance. [ 1323.419853] env[69992]: DEBUG nova.compute.provider_tree [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1323.579361] env[69992]: DEBUG nova.compute.manager [req-19369b44-3d5b-4d3e-98d8-c536f10c55d2 req-38e5b457-ea2c-4153-9cd9-1b2055bea9f7 service nova] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Received event network-vif-plugged-a4e3b7ab-de40-43e1-b9e2-222d0126cf7a {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1323.579598] env[69992]: DEBUG oslo_concurrency.lockutils [req-19369b44-3d5b-4d3e-98d8-c536f10c55d2 req-38e5b457-ea2c-4153-9cd9-1b2055bea9f7 service nova] Acquiring lock "48558980-2800-4f5b-80ce-d59552445c3f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1323.579785] env[69992]: DEBUG oslo_concurrency.lockutils [req-19369b44-3d5b-4d3e-98d8-c536f10c55d2 req-38e5b457-ea2c-4153-9cd9-1b2055bea9f7 service nova] Lock "48558980-2800-4f5b-80ce-d59552445c3f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1323.579947] env[69992]: DEBUG oslo_concurrency.lockutils [req-19369b44-3d5b-4d3e-98d8-c536f10c55d2 req-38e5b457-ea2c-4153-9cd9-1b2055bea9f7 service nova] Lock "48558980-2800-4f5b-80ce-d59552445c3f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1323.580130] env[69992]: DEBUG nova.compute.manager [req-19369b44-3d5b-4d3e-98d8-c536f10c55d2 req-38e5b457-ea2c-4153-9cd9-1b2055bea9f7 service nova] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] No waiting events found dispatching network-vif-plugged-a4e3b7ab-de40-43e1-b9e2-222d0126cf7a {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1323.580294] env[69992]: WARNING nova.compute.manager [req-19369b44-3d5b-4d3e-98d8-c536f10c55d2 req-38e5b457-ea2c-4153-9cd9-1b2055bea9f7 service nova] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Received unexpected event network-vif-plugged-a4e3b7ab-de40-43e1-b9e2-222d0126cf7a for instance with vm_state building and task_state spawning. 
[ 1323.580450] env[69992]: DEBUG nova.compute.manager [req-19369b44-3d5b-4d3e-98d8-c536f10c55d2 req-38e5b457-ea2c-4153-9cd9-1b2055bea9f7 service nova] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Received event network-changed-a4e3b7ab-de40-43e1-b9e2-222d0126cf7a {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1323.580609] env[69992]: DEBUG nova.compute.manager [req-19369b44-3d5b-4d3e-98d8-c536f10c55d2 req-38e5b457-ea2c-4153-9cd9-1b2055bea9f7 service nova] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Refreshing instance network info cache due to event network-changed-a4e3b7ab-de40-43e1-b9e2-222d0126cf7a. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1323.580769] env[69992]: DEBUG oslo_concurrency.lockutils [req-19369b44-3d5b-4d3e-98d8-c536f10c55d2 req-38e5b457-ea2c-4153-9cd9-1b2055bea9f7 service nova] Acquiring lock "refresh_cache-48558980-2800-4f5b-80ce-d59552445c3f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.590182] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1323.717315] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Releasing lock "refresh_cache-48558980-2800-4f5b-80ce-d59552445c3f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1323.717631] env[69992]: DEBUG nova.compute.manager [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Instance network_info: |[{"id": "a4e3b7ab-de40-43e1-b9e2-222d0126cf7a", "address": "fa:16:3e:ed:92:5b", "network": {"id": "d02e51d7-72be-4f6c-aa90-0ada60f8d7f7", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1603378574-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34923aa9da1d46cc9d22d569d9428781", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4e3b7ab-de", "ovs_interfaceid": "a4e3b7ab-de40-43e1-b9e2-222d0126cf7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1323.717921] env[69992]: DEBUG oslo_concurrency.lockutils [req-19369b44-3d5b-4d3e-98d8-c536f10c55d2 
req-38e5b457-ea2c-4153-9cd9-1b2055bea9f7 service nova] Acquired lock "refresh_cache-48558980-2800-4f5b-80ce-d59552445c3f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1323.718114] env[69992]: DEBUG nova.network.neutron [req-19369b44-3d5b-4d3e-98d8-c536f10c55d2 req-38e5b457-ea2c-4153-9cd9-1b2055bea9f7 service nova] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Refreshing network info cache for port a4e3b7ab-de40-43e1-b9e2-222d0126cf7a {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1323.719357] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:92:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11da2092-76f7-447e-babb-8fc14ad39a71', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4e3b7ab-de40-43e1-b9e2-222d0126cf7a', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1323.728934] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1323.730030] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1323.730591] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-973ae4c8-51ce-4b01-b57e-b963e15518ce {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.749721] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1323.749721] env[69992]: value = "task-2897827" [ 1323.749721] env[69992]: _type = "Task" [ 1323.749721] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.761831] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897827, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.798376] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4d9f0595-7250-4163-ad26-e93a341f8c44 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "e95e47c2-d82e-4153-8d16-7b65d992e91a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1323.799065] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4d9f0595-7250-4163-ad26-e93a341f8c44 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "e95e47c2-d82e-4153-8d16-7b65d992e91a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1323.799367] env[69992]: INFO nova.compute.manager [None req-4d9f0595-7250-4163-ad26-e93a341f8c44 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Attaching volume 23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74 to /dev/sdb [ 1323.832040] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fecd977-3bca-4590-b199-d0a5944d40cf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.839423] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0faff8-9e22-4855-b035-b1645846f444 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.855163] env[69992]: DEBUG nova.virt.block_device [None req-4d9f0595-7250-4163-ad26-e93a341f8c44 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Updating existing volume attachment record: 3d3731a4-c3fd-45c0-b6e4-6e916047b6fc {{(pid=69992) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1323.923922] env[69992]: DEBUG oslo_concurrency.lockutils [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1323.923922] env[69992]: DEBUG nova.scheduler.client.report [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1323.928550] env[69992]: DEBUG oslo_concurrency.lockutils [None req-db39b0d4-c2a7-4835-bc2d-8c469cd893d5 
tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "45a00234-7ebf-4835-bad3-30474bb27148" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1323.928788] env[69992]: DEBUG oslo_concurrency.lockutils [None req-db39b0d4-c2a7-4835-bc2d-8c469cd893d5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "45a00234-7ebf-4835-bad3-30474bb27148" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1323.929015] env[69992]: DEBUG nova.compute.manager [None req-db39b0d4-c2a7-4835-bc2d-8c469cd893d5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1323.929885] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860c47f2-d6ee-4930-8d91-543ddc8b84a4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.936935] env[69992]: DEBUG nova.compute.manager [None req-db39b0d4-c2a7-4835-bc2d-8c469cd893d5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69992) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1323.937590] env[69992]: DEBUG nova.objects.instance [None req-db39b0d4-c2a7-4835-bc2d-8c469cd893d5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lazy-loading 'flavor' on Instance uuid 45a00234-7ebf-4835-bad3-30474bb27148 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1324.013547] env[69992]: DEBUG nova.compute.manager [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1324.042372] env[69992]: DEBUG nova.virt.hardware [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1324.042646] env[69992]: DEBUG nova.virt.hardware [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1324.042809] env[69992]: DEBUG nova.virt.hardware [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1324.043018] env[69992]: DEBUG nova.virt.hardware [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1324.043181] env[69992]: DEBUG nova.virt.hardware [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1324.043336] env[69992]: DEBUG nova.virt.hardware [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1324.043555] env[69992]: DEBUG nova.virt.hardware [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1324.043954] env[69992]: DEBUG nova.virt.hardware [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1324.044139] env[69992]: DEBUG 
nova.virt.hardware [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1324.044187] env[69992]: DEBUG nova.virt.hardware [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1324.044343] env[69992]: DEBUG nova.virt.hardware [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1324.045216] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e92c0a-14cb-42bb-88fc-ab30da941058 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.052574] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d75222fe-07cc-4b3d-aa41-7f73391bc3d2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.261331] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897827, 'name': CreateVM_Task, 'duration_secs': 0.300754} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.261534] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1324.262332] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1324.262519] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1324.262860] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1324.263181] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57e73828-bd78-4fff-9d3a-a7f11c380774 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.268848] env[69992]: 
DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1324.268848] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]521bd50b-3c8a-49f1-37f2-a4fb3dc14ee7" [ 1324.268848] env[69992]: _type = "Task" [ 1324.268848] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.279907] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521bd50b-3c8a-49f1-37f2-a4fb3dc14ee7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.432078] env[69992]: DEBUG oslo_concurrency.lockutils [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.448s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1324.432078] env[69992]: DEBUG nova.compute.manager [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1324.433423] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.367s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1324.433759] env[69992]: DEBUG nova.objects.instance [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Lazy-loading 'resources' on Instance uuid be28d7a8-6566-45aa-8b4c-08c7eb29864d {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1324.482989] env[69992]: DEBUG nova.network.neutron [req-19369b44-3d5b-4d3e-98d8-c536f10c55d2 req-38e5b457-ea2c-4153-9cd9-1b2055bea9f7 service nova] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Updated VIF entry in instance network info cache for port a4e3b7ab-de40-43e1-b9e2-222d0126cf7a. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1324.483316] env[69992]: DEBUG nova.network.neutron [req-19369b44-3d5b-4d3e-98d8-c536f10c55d2 req-38e5b457-ea2c-4153-9cd9-1b2055bea9f7 service nova] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Updating instance_info_cache with network_info: [{"id": "a4e3b7ab-de40-43e1-b9e2-222d0126cf7a", "address": "fa:16:3e:ed:92:5b", "network": {"id": "d02e51d7-72be-4f6c-aa90-0ada60f8d7f7", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1603378574-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34923aa9da1d46cc9d22d569d9428781", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4e3b7ab-de", "ovs_interfaceid": "a4e3b7ab-de40-43e1-b9e2-222d0126cf7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1324.515831] env[69992]: DEBUG nova.network.neutron [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Successfully updated port: 1f44518f-713e-4671-bc22-96c67ac28c8e {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1324.554871] env[69992]: DEBUG nova.compute.manager [req-e3728e14-07f0-4283-bb0d-d35b7377731a req-82098a62-987c-4817-883c-1bea47ec71e7 service nova] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Received event network-vif-plugged-1f44518f-713e-4671-bc22-96c67ac28c8e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1324.554871] env[69992]: DEBUG oslo_concurrency.lockutils [req-e3728e14-07f0-4283-bb0d-d35b7377731a req-82098a62-987c-4817-883c-1bea47ec71e7 service nova] Acquiring lock "9464339a-b760-47e9-bc75-e88ce18bf71b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1324.554871] env[69992]: DEBUG oslo_concurrency.lockutils [req-e3728e14-07f0-4283-bb0d-d35b7377731a req-82098a62-987c-4817-883c-1bea47ec71e7 service nova] Lock "9464339a-b760-47e9-bc75-e88ce18bf71b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1324.554871] env[69992]: DEBUG oslo_concurrency.lockutils [req-e3728e14-07f0-4283-bb0d-d35b7377731a req-82098a62-987c-4817-883c-1bea47ec71e7 service nova] Lock "9464339a-b760-47e9-bc75-e88ce18bf71b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1324.555757] env[69992]: DEBUG nova.compute.manager [req-e3728e14-07f0-4283-bb0d-d35b7377731a req-82098a62-987c-4817-883c-1bea47ec71e7 service nova] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] No waiting events found dispatching network-vif-plugged-1f44518f-713e-4671-bc22-96c67ac28c8e {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1324.555847] env[69992]: WARNING nova.compute.manager [req-e3728e14-07f0-4283-bb0d-d35b7377731a req-82098a62-987c-4817-883c-1bea47ec71e7 service nova] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Received unexpected event network-vif-plugged-1f44518f-713e-4671-bc22-96c67ac28c8e for instance with vm_state building and task_state spawning. [ 1324.556017] env[69992]: DEBUG nova.compute.manager [req-e3728e14-07f0-4283-bb0d-d35b7377731a req-82098a62-987c-4817-883c-1bea47ec71e7 service nova] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Received event network-changed-1f44518f-713e-4671-bc22-96c67ac28c8e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1324.556352] env[69992]: DEBUG nova.compute.manager [req-e3728e14-07f0-4283-bb0d-d35b7377731a req-82098a62-987c-4817-883c-1bea47ec71e7 service nova] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Refreshing instance network info cache due to event network-changed-1f44518f-713e-4671-bc22-96c67ac28c8e. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1324.556352] env[69992]: DEBUG oslo_concurrency.lockutils [req-e3728e14-07f0-4283-bb0d-d35b7377731a req-82098a62-987c-4817-883c-1bea47ec71e7 service nova] Acquiring lock "refresh_cache-9464339a-b760-47e9-bc75-e88ce18bf71b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1324.556486] env[69992]: DEBUG oslo_concurrency.lockutils [req-e3728e14-07f0-4283-bb0d-d35b7377731a req-82098a62-987c-4817-883c-1bea47ec71e7 service nova] Acquired lock "refresh_cache-9464339a-b760-47e9-bc75-e88ce18bf71b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1324.556646] env[69992]: DEBUG nova.network.neutron [req-e3728e14-07f0-4283-bb0d-d35b7377731a req-82098a62-987c-4817-883c-1bea47ec71e7 service nova] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Refreshing network info cache for port 1f44518f-713e-4671-bc22-96c67ac28c8e {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1324.779349] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521bd50b-3c8a-49f1-37f2-a4fb3dc14ee7, 'name': SearchDatastore_Task, 'duration_secs': 0.011185} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.779715] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1324.779861] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1324.780112] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1324.780265] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1324.780446] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1324.780714] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-50138a76-6753-4866-ae55-241dd7d83482 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.788924] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1324.789283] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1324.789863] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bde278e6-d37d-49eb-b093-0aa562887324 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.795096] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1324.795096] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5264f58c-f4e6-eadd-7c34-80380bca54fa" [ 1324.795096] env[69992]: _type = "Task" [ 1324.795096] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.802507] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5264f58c-f4e6-eadd-7c34-80380bca54fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.940102] env[69992]: DEBUG nova.compute.utils [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1324.948118] env[69992]: DEBUG nova.compute.manager [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1324.948325] env[69992]: DEBUG nova.network.neutron [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1324.953292] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-db39b0d4-c2a7-4835-bc2d-8c469cd893d5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1324.953838] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22f1fd57-e1b9-42b5-8a5b-08c5caa778fc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.964591] env[69992]: DEBUG oslo_vmware.api [None req-db39b0d4-c2a7-4835-bc2d-8c469cd893d5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1324.964591] env[69992]: value = "task-2897831" [ 1324.964591] env[69992]: _type = "Task" [ 1324.964591] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.975062] env[69992]: DEBUG oslo_vmware.api [None req-db39b0d4-c2a7-4835-bc2d-8c469cd893d5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897831, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.985957] env[69992]: DEBUG oslo_concurrency.lockutils [req-19369b44-3d5b-4d3e-98d8-c536f10c55d2 req-38e5b457-ea2c-4153-9cd9-1b2055bea9f7 service nova] Releasing lock "refresh_cache-48558980-2800-4f5b-80ce-d59552445c3f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1325.017450] env[69992]: DEBUG nova.policy [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ad6673cee8e04b968c6afd54f9b51b74', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1471cdd6671b4e6ebc23b8fc2b120b63', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1325.019038] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "refresh_cache-9464339a-b760-47e9-bc75-e88ce18bf71b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1325.096031] env[69992]: DEBUG nova.network.neutron [req-e3728e14-07f0-4283-bb0d-d35b7377731a req-82098a62-987c-4817-883c-1bea47ec71e7 service nova] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1325.169694] env[69992]: DEBUG nova.network.neutron [req-e3728e14-07f0-4283-bb0d-d35b7377731a req-82098a62-987c-4817-883c-1bea47ec71e7 service nova] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1325.180838] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1325.181160] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1325.181361] env[69992]: INFO nova.compute.manager [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Shelving [ 1325.252480] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdab662f-4993-46fc-bffc-e7125805fef3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.264673] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19fcee0c-5e60-4a5b-bf5d-f3a26063a9d3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.308956] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b16736d-3b0c-4801-8283-d6b1b5a37725 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.318076] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5264f58c-f4e6-eadd-7c34-80380bca54fa, 'name': SearchDatastore_Task, 'duration_secs': 0.009471} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.320770] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6bdc4eb-e39e-44e5-871e-1f25615055b7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.323976] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc39bdf5-e134-4f73-aae9-507a7dbb0809 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.330949] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1325.330949] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52648148-3a82-355f-2809-ef99f700149a" [ 1325.330949] env[69992]: _type = "Task" [ 1325.330949] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.339230] env[69992]: DEBUG nova.compute.provider_tree [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1325.349163] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52648148-3a82-355f-2809-ef99f700149a, 'name': SearchDatastore_Task, 'duration_secs': 0.010433} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.349163] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1325.349163] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 48558980-2800-4f5b-80ce-d59552445c3f/48558980-2800-4f5b-80ce-d59552445c3f.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1325.349369] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e14868ca-bb66-48ff-aa69-57a13f359b57 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.357715] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1325.357715] env[69992]: value = "task-2897832" [ 1325.357715] env[69992]: _type = "Task" [ 1325.357715] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.365232] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897832, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.394570] env[69992]: DEBUG nova.network.neutron [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Successfully created port: fc9715b8-215a-4627-ab81-65fa9760790c {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1325.450059] env[69992]: DEBUG nova.compute.manager [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1325.473940] env[69992]: DEBUG oslo_vmware.api [None req-db39b0d4-c2a7-4835-bc2d-8c469cd893d5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897831, 'name': PowerOffVM_Task, 'duration_secs': 0.318905} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.474207] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-db39b0d4-c2a7-4835-bc2d-8c469cd893d5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1325.474534] env[69992]: DEBUG nova.compute.manager [None req-db39b0d4-c2a7-4835-bc2d-8c469cd893d5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1325.475160] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd22f359-9750-4c41-b812-0821dd690f9b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.672821] env[69992]: DEBUG oslo_concurrency.lockutils [req-e3728e14-07f0-4283-bb0d-d35b7377731a req-82098a62-987c-4817-883c-1bea47ec71e7 service nova] Releasing lock "refresh_cache-9464339a-b760-47e9-bc75-e88ce18bf71b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1325.673245] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired lock "refresh_cache-9464339a-b760-47e9-bc75-e88ce18bf71b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1325.673435] env[69992]: DEBUG nova.network.neutron [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1325.845648] env[69992]: DEBUG nova.scheduler.client.report [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1325.871153] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897832, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476486} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.871439] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 48558980-2800-4f5b-80ce-d59552445c3f/48558980-2800-4f5b-80ce-d59552445c3f.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1325.871680] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1325.872469] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a91bdf91-7b09-4604-8966-c308a3179c61 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.880196] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1325.880196] env[69992]: value = "task-2897833" [ 1325.880196] env[69992]: _type = "Task" [ 1325.880196] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.889671] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897833, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.992030] env[69992]: DEBUG oslo_concurrency.lockutils [None req-db39b0d4-c2a7-4835-bc2d-8c469cd893d5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "45a00234-7ebf-4835-bad3-30474bb27148" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.063s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1326.192743] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1326.193065] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6291356c-65ad-42ae-9ce5-2bbbf7a8ebf6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.200345] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1326.200345] env[69992]: value = "task-2897835" [ 1326.200345] env[69992]: _type = "Task" [ 1326.200345] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.208741] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897835, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.210968] env[69992]: DEBUG nova.network.neutron [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1326.354283] env[69992]: DEBUG nova.network.neutron [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Updating instance_info_cache with network_info: [{"id": "1f44518f-713e-4671-bc22-96c67ac28c8e", "address": "fa:16:3e:ae:3c:2e", "network": {"id": "107a28a2-7647-4953-abac-1246b892511f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1382825212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ab89c6cf054418a4dd1a0e61b3a5e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f44518f-71", "ovs_interfaceid": "1f44518f-713e-4671-bc22-96c67ac28c8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1326.358027] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.924s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1326.360667] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "45a00234-7ebf-4835-bad3-30474bb27148" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1326.360999] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "45a00234-7ebf-4835-bad3-30474bb27148" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1326.361349] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "45a00234-7ebf-4835-bad3-30474bb27148-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1326.361573] env[69992]: DEBUG oslo_concurrency.lockutils [None 
req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "45a00234-7ebf-4835-bad3-30474bb27148-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1326.361744] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "45a00234-7ebf-4835-bad3-30474bb27148-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1326.363928] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d66420e8-f2c6-4688-809f-6e6c242b9756 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 11.094s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1326.366507] env[69992]: INFO nova.compute.manager [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Terminating instance [ 1326.386029] env[69992]: INFO nova.scheduler.client.report [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Deleted allocations for instance be28d7a8-6566-45aa-8b4c-08c7eb29864d [ 1326.397301] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897833, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069975} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.397606] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1326.398559] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f839ea5-1b56-4c08-9ca3-39a36e9ca229 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.423154] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] 48558980-2800-4f5b-80ce-d59552445c3f/48558980-2800-4f5b-80ce-d59552445c3f.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1326.423575] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f974584d-9e72-4111-8910-96baa67f91ce {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.444580] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1326.444580] env[69992]: value = "task-2897836" [ 1326.444580] env[69992]: _type = "Task" [ 1326.444580] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.452774] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897836, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.459161] env[69992]: DEBUG nova.compute.manager [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1326.488840] env[69992]: DEBUG nova.virt.hardware [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1326.489188] env[69992]: DEBUG nova.virt.hardware [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1326.489397] env[69992]: DEBUG nova.virt.hardware [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1326.489624] env[69992]: DEBUG nova.virt.hardware [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1326.489813] env[69992]: DEBUG nova.virt.hardware [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1326.490049] env[69992]: DEBUG nova.virt.hardware [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1326.490499] env[69992]: DEBUG nova.virt.hardware [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1326.490575] env[69992]: DEBUG nova.virt.hardware [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1326.490980] env[69992]: DEBUG 
nova.virt.hardware [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1326.490980] env[69992]: DEBUG nova.virt.hardware [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1326.491225] env[69992]: DEBUG nova.virt.hardware [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1326.492668] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-388a0e3b-437d-451a-ad81-cd4ef48560fa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.501483] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ecb4594-75ea-4bc1-a807-ceb14e4bf09c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.711442] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897835, 'name': PowerOffVM_Task, 'duration_secs': 0.212867} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.711722] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1326.712641] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715edf57-5d4e-4d21-b1b0-197e30d38de6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.733637] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa57862-9d9d-4e40-b686-5ee196c67cfc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.807394] env[69992]: DEBUG nova.compute.manager [req-58425ede-c157-4929-a811-03bb5ba6f65c req-420b4bab-94b2-4e21-8305-f07f0581f1a7 service nova] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Received event network-vif-plugged-fc9715b8-215a-4627-ab81-65fa9760790c {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1326.807655] env[69992]: DEBUG oslo_concurrency.lockutils [req-58425ede-c157-4929-a811-03bb5ba6f65c req-420b4bab-94b2-4e21-8305-f07f0581f1a7 service nova] Acquiring lock "2b89e218-81cc-49fc-a80a-35dde48bdd5d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1326.807842] env[69992]: DEBUG oslo_concurrency.lockutils [req-58425ede-c157-4929-a811-03bb5ba6f65c req-420b4bab-94b2-4e21-8305-f07f0581f1a7 service nova] Lock "2b89e218-81cc-49fc-a80a-35dde48bdd5d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1326.808031] env[69992]: DEBUG oslo_concurrency.lockutils [req-58425ede-c157-4929-a811-03bb5ba6f65c req-420b4bab-94b2-4e21-8305-f07f0581f1a7 service nova] Lock "2b89e218-81cc-49fc-a80a-35dde48bdd5d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1326.808456] env[69992]: DEBUG nova.compute.manager [req-58425ede-c157-4929-a811-03bb5ba6f65c req-420b4bab-94b2-4e21-8305-f07f0581f1a7 service nova] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] No waiting events found dispatching network-vif-plugged-fc9715b8-215a-4627-ab81-65fa9760790c {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1326.808635] env[69992]: WARNING nova.compute.manager [req-58425ede-c157-4929-a811-03bb5ba6f65c req-420b4bab-94b2-4e21-8305-f07f0581f1a7 service nova] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Received unexpected event network-vif-plugged-fc9715b8-215a-4627-ab81-65fa9760790c for instance with vm_state building and task_state spawning. 
[ 1326.864620] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Releasing lock "refresh_cache-9464339a-b760-47e9-bc75-e88ce18bf71b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1326.864863] env[69992]: DEBUG nova.compute.manager [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Instance network_info: |[{"id": "1f44518f-713e-4671-bc22-96c67ac28c8e", "address": "fa:16:3e:ae:3c:2e", "network": {"id": "107a28a2-7647-4953-abac-1246b892511f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1382825212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ab89c6cf054418a4dd1a0e61b3a5e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f44518f-71", "ovs_interfaceid": "1f44518f-713e-4671-bc22-96c67ac28c8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1326.865306] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:3c:2e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46785c9c-8b22-487d-a854-b3e67c5ed1d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1f44518f-713e-4671-bc22-96c67ac28c8e', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1326.873836] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1326.876891] env[69992]: DEBUG nova.compute.manager [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1326.877103] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1326.877770] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1326.879384] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f56cecd5-bb44-4869-be8d-f74ab14db8bb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.881989] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2d123d2c-e3aa-494e-a3be-3a5a7e5de09d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.897928] env[69992]: DEBUG nova.network.neutron [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Successfully updated port: fc9715b8-215a-4627-ab81-65fa9760790c {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1326.907993] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7a2f0d0b-0566-4719-9d93-83ab85fee10f tempest-ServersNegativeTestMultiTenantJSON-2054196432 tempest-ServersNegativeTestMultiTenantJSON-2054196432-project-member] Lock "be28d7a8-6566-45aa-8b4c-08c7eb29864d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.382s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1326.917734] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1326.919290] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fdd7835d-0d7f-4bf0-8090-057bdcb1c258 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.920871] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1326.920871] env[69992]: value = "task-2897837" [ 1326.920871] env[69992]: _type = "Task" [ 1326.920871] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.938423] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897837, 'name': CreateVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.955940] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897836, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.989404] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1326.989404] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1326.989404] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Deleting the datastore file [datastore2] 45a00234-7ebf-4835-bad3-30474bb27148 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1326.990739] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a038f0b6-722a-47cf-8f75-1245d874ee02 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.997073] env[69992]: DEBUG oslo_vmware.api [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1326.997073] env[69992]: value = "task-2897839" [ 1326.997073] env[69992]: _type = "Task" [ 1326.997073] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.011704] env[69992]: DEBUG oslo_vmware.api [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897839, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.226694] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a8a394-553f-416e-a8a9-cf708acde35d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.239367] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47332274-cb13-471f-ad12-d468b2eb9b57 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.245021] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Creating Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1327.245378] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-62a7b075-efd9-41f6-902e-ab62f7d6602a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.254831] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1327.254831] env[69992]: value = "task-2897840" [ 1327.254831] env[69992]: _type = "Task" [ 1327.254831] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.286650] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d1321c-cc0b-4ff1-b0a3-b59313c0bc5e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.300719] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8329f849-2ac6-4df9-98fb-cdfbcd2a9123 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.307301] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897840, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.318101] env[69992]: DEBUG nova.compute.provider_tree [None req-d66420e8-f2c6-4688-809f-6e6c242b9756 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1327.409223] env[69992]: DEBUG oslo_concurrency.lockutils [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "refresh_cache-2b89e218-81cc-49fc-a80a-35dde48bdd5d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.409445] env[69992]: DEBUG oslo_concurrency.lockutils [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquired lock "refresh_cache-2b89e218-81cc-49fc-a80a-35dde48bdd5d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1327.409631] env[69992]: DEBUG nova.network.neutron [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1327.434837] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897837, 'name': CreateVM_Task, 'duration_secs': 0.434479} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.435135] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1327.436262] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.436507] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1327.436987] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1327.437383] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bcd4559-3d6e-438b-984e-a922eeb11e55 {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.444105] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1327.444105] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52c32ae8-841f-7801-6b7a-2ae706f26d80" [ 1327.444105] env[69992]: _type = "Task" [ 1327.444105] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.458453] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c32ae8-841f-7801-6b7a-2ae706f26d80, 'name': SearchDatastore_Task, 'duration_secs': 0.011802} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.461805] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1327.462082] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1327.462338] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.462501] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1327.462683] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1327.462987] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897836, 'name': ReconfigVM_Task, 'duration_secs': 0.588962} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.463232] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e6bac523-e0b0-4c90-8a11-bb3996fcd677 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.467350] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Reconfigured VM instance instance-00000060 to attach disk [datastore2] 48558980-2800-4f5b-80ce-d59552445c3f/48558980-2800-4f5b-80ce-d59552445c3f.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1327.467350] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9948daba-f10a-4ad0-8ba4-1c1bbf3bfef5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.474494] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1327.474494] env[69992]: value = "task-2897841" [ 1327.474494] env[69992]: _type = "Task" [ 1327.474494] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.480239] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1327.480477] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1327.482401] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10f12cd4-5f81-417f-ace9-7bea83e35d35 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.489947] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897841, 'name': Rename_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.493386] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1327.493386] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5239a8c8-f381-a629-cc84-ecfccf52b2b2" [ 1327.493386] env[69992]: _type = "Task" [ 1327.493386] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.507973] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5239a8c8-f381-a629-cc84-ecfccf52b2b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.511340] env[69992]: DEBUG oslo_vmware.api [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897839, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.226477} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.511655] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1327.511849] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1327.512083] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1327.512313] env[69992]: INFO nova.compute.manager [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Took 0.64 seconds to destroy the instance on the hypervisor. [ 1327.512581] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1327.512864] env[69992]: DEBUG nova.compute.manager [-] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1327.512948] env[69992]: DEBUG nova.network.neutron [-] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1327.795114] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897840, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.821354] env[69992]: DEBUG nova.scheduler.client.report [None req-d66420e8-f2c6-4688-809f-6e6c242b9756 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1327.944733] env[69992]: DEBUG nova.network.neutron [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1327.991311] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897841, 'name': Rename_Task, 'duration_secs': 0.158767} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.994453] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1327.994855] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ba050267-5807-4cd2-bcff-9d15ce0a5ff6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.009802] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5239a8c8-f381-a629-cc84-ecfccf52b2b2, 'name': SearchDatastore_Task, 'duration_secs': 0.012848} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.014329] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1328.014329] env[69992]: value = "task-2897842" [ 1328.014329] env[69992]: _type = "Task" [ 1328.014329] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.014329] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49dd8854-c757-43ff-829d-16cf004b982a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.027335] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1328.027335] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52778710-6d0a-f1bb-0ce1-32ebbd8491bc" [ 1328.027335] env[69992]: _type = "Task" [ 1328.027335] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.028043] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897842, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.037039] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52778710-6d0a-f1bb-0ce1-32ebbd8491bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.112669] env[69992]: DEBUG nova.network.neutron [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Updating instance_info_cache with network_info: [{"id": "fc9715b8-215a-4627-ab81-65fa9760790c", "address": "fa:16:3e:92:41:a5", "network": {"id": "58824cf0-bce0-4f1b-9942-dd68624dd3ff", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1287894269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1471cdd6671b4e6ebc23b8fc2b120b63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6fab536-1e48-4d07-992a-076f0e6d089c", "external-id": "nsx-vlan-transportzone-61", "segmentation_id": 61, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc9715b8-21", "ovs_interfaceid": "fc9715b8-215a-4627-ab81-65fa9760790c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1328.295580] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897840, 'name': 
CreateSnapshot_Task, 'duration_secs': 0.811507} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.296126] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Created Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1328.297021] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0db9b96-92bd-4e7e-bcae-0bc31ec87d89 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.402509] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d9f0595-7250-4163-ad26-e93a341f8c44 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Volume attach. Driver type: vmdk {{(pid=69992) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1328.402744] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d9f0595-7250-4163-ad26-e93a341f8c44 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582089', 'volume_id': '23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74', 'name': 'volume-23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e95e47c2-d82e-4153-8d16-7b65d992e91a', 'attached_at': '', 'detached_at': '', 'volume_id': '23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74', 'serial': '23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1328.403980] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-498d598c-309b-4f54-b225-3da4696f5070 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.426758] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-332ab78e-4c5b-4f15-bdc1-3068d48cf7f6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.457307] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d9f0595-7250-4163-ad26-e93a341f8c44 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] volume-23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74/volume-23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1328.458008] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e673ca0-0fe6-4ab0-9e6e-ddfc7ef09a38 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.477306] env[69992]: DEBUG oslo_vmware.api [None req-4d9f0595-7250-4163-ad26-e93a341f8c44 
tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1328.477306] env[69992]: value = "task-2897843" [ 1328.477306] env[69992]: _type = "Task" [ 1328.477306] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.486820] env[69992]: DEBUG oslo_vmware.api [None req-4d9f0595-7250-4163-ad26-e93a341f8c44 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897843, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.527043] env[69992]: DEBUG oslo_vmware.api [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897842, 'name': PowerOnVM_Task, 'duration_secs': 0.472289} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.527313] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1328.527516] env[69992]: INFO nova.compute.manager [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Took 6.99 seconds to spawn the instance on the hypervisor. [ 1328.527687] env[69992]: DEBUG nova.compute.manager [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1328.528519] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82423edc-246a-4e13-ab7e-41cdec1d8cf3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.545622] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52778710-6d0a-f1bb-0ce1-32ebbd8491bc, 'name': SearchDatastore_Task, 'duration_secs': 0.013972} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.546169] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1328.546423] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 9464339a-b760-47e9-bc75-e88ce18bf71b/9464339a-b760-47e9-bc75-e88ce18bf71b.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1328.546740] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-57b4987a-5a4e-4c9b-9d30-680740bb96fe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.554332] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1328.554332] env[69992]: value = "task-2897844" [ 1328.554332] env[69992]: _type = "Task" [ 1328.554332] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.564510] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897844, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.614979] env[69992]: DEBUG oslo_concurrency.lockutils [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Releasing lock "refresh_cache-2b89e218-81cc-49fc-a80a-35dde48bdd5d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1328.615350] env[69992]: DEBUG nova.compute.manager [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Instance network_info: |[{"id": "fc9715b8-215a-4627-ab81-65fa9760790c", "address": "fa:16:3e:92:41:a5", "network": {"id": "58824cf0-bce0-4f1b-9942-dd68624dd3ff", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1287894269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1471cdd6671b4e6ebc23b8fc2b120b63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6fab536-1e48-4d07-992a-076f0e6d089c", "external-id": "nsx-vlan-transportzone-61", "segmentation_id": 61, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc9715b8-21", "ovs_interfaceid": "fc9715b8-215a-4627-ab81-65fa9760790c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1328.615798] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:41:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd6fab536-1e48-4d07-992a-076f0e6d089c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc9715b8-215a-4627-ab81-65fa9760790c', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1328.625405] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1328.626022] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1328.626312] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c92fca69-4402-4584-9050-ba3676227e17 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.653638] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1328.653638] env[69992]: value = "task-2897845" [ 1328.653638] env[69992]: _type = "Task" [ 1328.653638] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.664129] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897845, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.831128] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Creating linked-clone VM from snapshot {{(pid=69992) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1328.831823] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ec46b889-5c49-4bdc-8461-cb569523816b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.840944] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d66420e8-f2c6-4688-809f-6e6c242b9756 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.477s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1328.849492] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.543s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1328.851010] env[69992]: INFO nova.compute.claims [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1328.855844] env[69992]: DEBUG nova.compute.manager [req-8a4f76a4-67ff-4a96-93fc-f36781a508a4 req-c79ebe8c-2eec-4f2f-9901-e4e65426682d service nova] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Received event network-changed-fc9715b8-215a-4627-ab81-65fa9760790c {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1328.856081] env[69992]: DEBUG nova.compute.manager [req-8a4f76a4-67ff-4a96-93fc-f36781a508a4 req-c79ebe8c-2eec-4f2f-9901-e4e65426682d service nova] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Refreshing instance network info 
cache due to event network-changed-fc9715b8-215a-4627-ab81-65fa9760790c. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1328.856346] env[69992]: DEBUG oslo_concurrency.lockutils [req-8a4f76a4-67ff-4a96-93fc-f36781a508a4 req-c79ebe8c-2eec-4f2f-9901-e4e65426682d service nova] Acquiring lock "refresh_cache-2b89e218-81cc-49fc-a80a-35dde48bdd5d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1328.856542] env[69992]: DEBUG oslo_concurrency.lockutils [req-8a4f76a4-67ff-4a96-93fc-f36781a508a4 req-c79ebe8c-2eec-4f2f-9901-e4e65426682d service nova] Acquired lock "refresh_cache-2b89e218-81cc-49fc-a80a-35dde48bdd5d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1328.856745] env[69992]: DEBUG nova.network.neutron [req-8a4f76a4-67ff-4a96-93fc-f36781a508a4 req-c79ebe8c-2eec-4f2f-9901-e4e65426682d service nova] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Refreshing network info cache for port fc9715b8-215a-4627-ab81-65fa9760790c {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1328.861844] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1328.861844] env[69992]: value = "task-2897846" [ 1328.861844] env[69992]: _type = "Task" [ 1328.861844] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.875721] env[69992]: DEBUG nova.network.neutron [-] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1328.879187] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897846, 'name': CloneVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.990375] env[69992]: DEBUG oslo_vmware.api [None req-4d9f0595-7250-4163-ad26-e93a341f8c44 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897843, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.054649] env[69992]: INFO nova.compute.manager [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Took 24.50 seconds to build instance. [ 1329.066469] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897844, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.170635] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897845, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.378701] env[69992]: INFO nova.compute.manager [-] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Took 1.87 seconds to deallocate network for instance. [ 1329.385716] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897846, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.450470] env[69992]: INFO nova.scheduler.client.report [None req-d66420e8-f2c6-4688-809f-6e6c242b9756 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Deleted allocation for migration d6ea2928-95fb-421c-896a-12bb1a9338e8 [ 1329.492421] env[69992]: DEBUG oslo_vmware.api [None req-4d9f0595-7250-4163-ad26-e93a341f8c44 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897843, 'name': ReconfigVM_Task, 'duration_secs': 0.515407} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.493038] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d9f0595-7250-4163-ad26-e93a341f8c44 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Reconfigured VM instance instance-00000057 to attach disk [datastore2] volume-23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74/volume-23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1329.498555] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8017ed13-c70d-471b-a6f0-533bab2c0932 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.522020] env[69992]: DEBUG oslo_vmware.api [None req-4d9f0595-7250-4163-ad26-e93a341f8c44 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1329.522020] env[69992]: value = "task-2897847" [ 1329.522020] env[69992]: _type = "Task" [ 1329.522020] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.530833] env[69992]: DEBUG oslo_vmware.api [None req-4d9f0595-7250-4163-ad26-e93a341f8c44 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897847, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.559905] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c137d5-5570-473d-9205-13dfbc5181d4 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "48558980-2800-4f5b-80ce-d59552445c3f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 26.014s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1329.569563] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897844, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513103} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.569899] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 9464339a-b760-47e9-bc75-e88ce18bf71b/9464339a-b760-47e9-bc75-e88ce18bf71b.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1329.570182] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1329.570576] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7400ee05-b6a5-443c-982f-e4c6f0935500 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.577503] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1329.577503] env[69992]: value = "task-2897848" [ 1329.577503] env[69992]: _type = "Task" [ 1329.577503] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.587340] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897848, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.673695] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897845, 'name': CreateVM_Task, 'duration_secs': 0.560695} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.673875] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1329.677692] env[69992]: DEBUG oslo_concurrency.lockutils [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.677692] env[69992]: DEBUG oslo_concurrency.lockutils [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1329.677692] env[69992]: DEBUG oslo_concurrency.lockutils [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1329.677692] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-932562df-a00b-46d9-85a1-22ff9445c3b6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.684796] env[69992]: DEBUG oslo_vmware.api [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1329.684796] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52fd6540-3892-ba8b-2026-06257134fe13" [ 1329.684796] env[69992]: _type = "Task" [ 1329.684796] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.693892] env[69992]: DEBUG oslo_vmware.api [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52fd6540-3892-ba8b-2026-06257134fe13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.797110] env[69992]: DEBUG nova.network.neutron [req-8a4f76a4-67ff-4a96-93fc-f36781a508a4 req-c79ebe8c-2eec-4f2f-9901-e4e65426682d service nova] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Updated VIF entry in instance network info cache for port fc9715b8-215a-4627-ab81-65fa9760790c. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1329.798336] env[69992]: DEBUG nova.network.neutron [req-8a4f76a4-67ff-4a96-93fc-f36781a508a4 req-c79ebe8c-2eec-4f2f-9901-e4e65426682d service nova] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Updating instance_info_cache with network_info: [{"id": "fc9715b8-215a-4627-ab81-65fa9760790c", "address": "fa:16:3e:92:41:a5", "network": {"id": "58824cf0-bce0-4f1b-9942-dd68624dd3ff", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1287894269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1471cdd6671b4e6ebc23b8fc2b120b63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6fab536-1e48-4d07-992a-076f0e6d089c", "external-id": "nsx-vlan-transportzone-61", "segmentation_id": 61, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc9715b8-21", "ovs_interfaceid": "fc9715b8-215a-4627-ab81-65fa9760790c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1329.885681] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897846, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.892670] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1329.960828] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d66420e8-f2c6-4688-809f-6e6c242b9756 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "31109fbd-ebc0-422d-a705-7d0e59d4bbb4" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 17.731s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1330.029899] env[69992]: DEBUG oslo_vmware.api [None req-4d9f0595-7250-4163-ad26-e93a341f8c44 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897847, 'name': ReconfigVM_Task, 'duration_secs': 0.160777} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.032268] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d9f0595-7250-4163-ad26-e93a341f8c44 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582089', 'volume_id': '23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74', 'name': 'volume-23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e95e47c2-d82e-4153-8d16-7b65d992e91a', 'attached_at': '', 'detached_at': '', 'volume_id': '23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74', 'serial': '23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1330.091743] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897848, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087341} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.091743] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1330.092320] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95dc85b7-90c3-472c-bf9c-8cdad043cc92 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.121940] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] 9464339a-b760-47e9-bc75-e88ce18bf71b/9464339a-b760-47e9-bc75-e88ce18bf71b.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1330.126174] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-caf99089-2d82-4b46-b362-9c1a63f02fab {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.147273] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1330.147273] env[69992]: value = "task-2897849" [ 1330.147273] env[69992]: _type = "Task" [ 1330.147273] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.155733] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897849, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.194893] env[69992]: DEBUG oslo_vmware.api [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52fd6540-3892-ba8b-2026-06257134fe13, 'name': SearchDatastore_Task, 'duration_secs': 0.009557} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.195299] env[69992]: DEBUG oslo_concurrency.lockutils [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1330.195566] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1330.195821] env[69992]: DEBUG oslo_concurrency.lockutils [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1330.195976] env[69992]: DEBUG oslo_concurrency.lockutils [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1330.196224] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1330.198947] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-77226ba4-4a5b-4882-a76b-4fbdcde89970 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.207995] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1330.208270] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1330.209298] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ee0a6df-87fc-4a40-865b-147f62d99c98 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.215038] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109b0e2b-f349-40c7-9383-6468b401b704 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.220204] env[69992]: DEBUG oslo_vmware.api [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1330.220204] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]521579d9-9806-f84a-4569-cae2603b0e5c" [ 1330.220204] env[69992]: _type = "Task" [ 1330.220204] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.230488] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9113acad-afbb-41d1-8a7c-a4df503ecba1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.238933] env[69992]: DEBUG oslo_vmware.api [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521579d9-9806-f84a-4569-cae2603b0e5c, 'name': SearchDatastore_Task, 'duration_secs': 0.011144} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.240022] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ca86c21-30a6-4ff7-93b3-8c7806f7912d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.272621] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-078f59bc-a5cd-4190-bb42-3a512b562216 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.277395] env[69992]: DEBUG oslo_vmware.api [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1330.277395] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5255210d-7314-774b-ed32-f06da1eb2d01" [ 1330.277395] env[69992]: _type = "Task" [ 1330.277395] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.284514] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966417e1-9c07-4578-adbc-f21aaf374490 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.293926] env[69992]: DEBUG oslo_vmware.api [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5255210d-7314-774b-ed32-f06da1eb2d01, 'name': SearchDatastore_Task, 'duration_secs': 0.009565} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.293926] env[69992]: DEBUG oslo_concurrency.lockutils [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1330.293926] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 2b89e218-81cc-49fc-a80a-35dde48bdd5d/2b89e218-81cc-49fc-a80a-35dde48bdd5d.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1330.293926] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6fb84db0-5c6a-4417-9879-c31d8d1534e7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.303116] env[69992]: DEBUG oslo_concurrency.lockutils [req-8a4f76a4-67ff-4a96-93fc-f36781a508a4 req-c79ebe8c-2eec-4f2f-9901-e4e65426682d service nova] Releasing lock "refresh_cache-2b89e218-81cc-49fc-a80a-35dde48bdd5d" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1330.303444] env[69992]: DEBUG nova.compute.manager [req-8a4f76a4-67ff-4a96-93fc-f36781a508a4 req-c79ebe8c-2eec-4f2f-9901-e4e65426682d service nova] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Received event network-vif-deleted-62dc8388-0e1c-4ec8-8f41-8e1feaa83858 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1330.303696] env[69992]: INFO nova.compute.manager [req-8a4f76a4-67ff-4a96-93fc-f36781a508a4 req-c79ebe8c-2eec-4f2f-9901-e4e65426682d service nova] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Neutron deleted interface 62dc8388-0e1c-4ec8-8f41-8e1feaa83858; detaching it from the instance and deleting it from the info cache [ 1330.303926] env[69992]: DEBUG nova.network.neutron [req-8a4f76a4-67ff-4a96-93fc-f36781a508a4 req-c79ebe8c-2eec-4f2f-9901-e4e65426682d service nova] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1330.305369] env[69992]: DEBUG nova.compute.provider_tree [None 
req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1330.312658] env[69992]: DEBUG oslo_vmware.api [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1330.312658] env[69992]: value = "task-2897850" [ 1330.312658] env[69992]: _type = "Task" [ 1330.312658] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.320321] env[69992]: DEBUG oslo_vmware.api [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897850, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.379279] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897846, 'name': CloneVM_Task, 'duration_secs': 1.363438} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.379621] env[69992]: INFO nova.virt.vmwareapi.vmops [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Created linked-clone VM from snapshot [ 1330.380584] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ddc98a-2040-4d4c-a7f7-4a50834769e6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.389123] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Uploading image 2afccf79-1e06-45e0-bd6d-e1bf4c00e288 {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1330.422378] env[69992]: DEBUG oslo_vmware.rw_handles [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1330.422378] env[69992]: value = "vm-582093" [ 1330.422378] env[69992]: _type = "VirtualMachine" [ 1330.422378] env[69992]: }. 
{{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1330.422378] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f83e38ac-1420-4e2c-beb2-3a13e7d018c3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.429411] env[69992]: DEBUG oslo_vmware.rw_handles [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lease: (returnval){ [ 1330.429411] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]523ad675-906e-bcc2-9ad9-e69ae00adfc4" [ 1330.429411] env[69992]: _type = "HttpNfcLease" [ 1330.429411] env[69992]: } obtained for exporting VM: (result){ [ 1330.429411] env[69992]: value = "vm-582093" [ 1330.429411] env[69992]: _type = "VirtualMachine" [ 1330.429411] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1330.430134] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the lease: (returnval){ [ 1330.430134] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]523ad675-906e-bcc2-9ad9-e69ae00adfc4" [ 1330.430134] env[69992]: _type = "HttpNfcLease" [ 1330.430134] env[69992]: } to be ready. {{(pid=69992) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1330.438416] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1330.438416] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]523ad675-906e-bcc2-9ad9-e69ae00adfc4" [ 1330.438416] env[69992]: _type = "HttpNfcLease" [ 1330.438416] env[69992]: } is initializing. 
{{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1330.605810] env[69992]: DEBUG oslo_concurrency.lockutils [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "31109fbd-ebc0-422d-a705-7d0e59d4bbb4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1330.605810] env[69992]: DEBUG oslo_concurrency.lockutils [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "31109fbd-ebc0-422d-a705-7d0e59d4bbb4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1330.605810] env[69992]: DEBUG oslo_concurrency.lockutils [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "31109fbd-ebc0-422d-a705-7d0e59d4bbb4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1330.605810] env[69992]: DEBUG oslo_concurrency.lockutils [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "31109fbd-ebc0-422d-a705-7d0e59d4bbb4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1330.605810] env[69992]: DEBUG oslo_concurrency.lockutils [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "31109fbd-ebc0-422d-a705-7d0e59d4bbb4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1330.606852] env[69992]: INFO nova.compute.manager [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Terminating instance [ 1330.662378] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897849, 'name': ReconfigVM_Task, 'duration_secs': 0.303676} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.662998] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Reconfigured VM instance instance-00000061 to attach disk [datastore2] 9464339a-b760-47e9-bc75-e88ce18bf71b/9464339a-b760-47e9-bc75-e88ce18bf71b.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1330.664089] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bfc61851-db68-44ef-bc57-7667be23cf07 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.673697] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1330.673697] env[69992]: value = "task-2897852" [ 1330.673697] env[69992]: _type = "Task" [ 1330.673697] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.689414] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897852, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.809169] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-abac82ae-f777-48cc-bc07-ba42ced54908 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.814725] env[69992]: DEBUG nova.scheduler.client.report [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1330.831182] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d92ad5-d71b-4ed2-8f09-2fc80b107d31 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.845992] env[69992]: DEBUG oslo_vmware.api [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897850, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491954} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.846718] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 2b89e218-81cc-49fc-a80a-35dde48bdd5d/2b89e218-81cc-49fc-a80a-35dde48bdd5d.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1330.847121] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1330.847250] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d7f47cf1-faff-4d15-8b89-39a781bc34f8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.854168] env[69992]: DEBUG oslo_vmware.api [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1330.854168] env[69992]: value = "task-2897853" [ 1330.854168] env[69992]: _type = "Task" [ 1330.854168] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.870529] env[69992]: DEBUG nova.compute.manager [req-8a4f76a4-67ff-4a96-93fc-f36781a508a4 req-c79ebe8c-2eec-4f2f-9901-e4e65426682d service nova] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Detach interface failed, port_id=62dc8388-0e1c-4ec8-8f41-8e1feaa83858, reason: Instance 45a00234-7ebf-4835-bad3-30474bb27148 could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1330.876894] env[69992]: DEBUG oslo_vmware.api [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897853, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.938704] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1330.938704] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]523ad675-906e-bcc2-9ad9-e69ae00adfc4" [ 1330.938704] env[69992]: _type = "HttpNfcLease" [ 1330.938704] env[69992]: } is ready. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1330.938704] env[69992]: DEBUG oslo_vmware.rw_handles [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1330.938704] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]523ad675-906e-bcc2-9ad9-e69ae00adfc4" [ 1330.938704] env[69992]: _type = "HttpNfcLease" [ 1330.938704] env[69992]: }. 
{{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1330.939447] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798f6f77-eedd-42d5-b4b4-3c901b9b67db {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.946822] env[69992]: DEBUG oslo_vmware.rw_handles [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528cfd47-d555-6628-9748-432896e9c3aa/disk-0.vmdk from lease info. {{(pid=69992) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1330.946987] env[69992]: DEBUG oslo_vmware.rw_handles [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528cfd47-d555-6628-9748-432896e9c3aa/disk-0.vmdk for reading. {{(pid=69992) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1331.077036] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-40e5673e-139e-4b8d-8079-b3d99e6279ef {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.078851] env[69992]: DEBUG nova.objects.instance [None req-4d9f0595-7250-4163-ad26-e93a341f8c44 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lazy-loading 'flavor' on Instance uuid e95e47c2-d82e-4153-8d16-7b65d992e91a {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1331.113192] env[69992]: DEBUG nova.compute.manager [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1331.114084] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1331.114660] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e329ca-ce10-40ef-b454-18f7460e1772 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.130405] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1331.130668] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f05dd7d-3359-4fa6-80f2-0a182b952858 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.140638] env[69992]: DEBUG oslo_vmware.api [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1331.140638] env[69992]: value = "task-2897854" [ 1331.140638] env[69992]: _type = "Task" [ 1331.140638] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.148826] env[69992]: DEBUG oslo_vmware.api [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897854, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.188533] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897852, 'name': Rename_Task, 'duration_secs': 0.240744} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.188533] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1331.188533] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-633155be-2cd6-4448-8cfa-c820ce38f891 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.195256] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1331.195533] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1331.202340] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1331.202340] env[69992]: value = "task-2897855" [ 1331.202340] env[69992]: _type = "Task" [ 1331.202340] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.212615] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897855, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.320402] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.471s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1331.320792] env[69992]: DEBUG nova.compute.manager [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1331.324231] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 11.558s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1331.366336] env[69992]: DEBUG oslo_vmware.api [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897853, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069824} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.367705] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1331.368579] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-274a97a9-bf12-4cb4-b3ba-6b87ebd64f65 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.398721] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 2b89e218-81cc-49fc-a80a-35dde48bdd5d/2b89e218-81cc-49fc-a80a-35dde48bdd5d.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1331.399568] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-636bcf93-7ae1-4d54-902b-84419571abde {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.420767] env[69992]: DEBUG oslo_vmware.api [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1331.420767] env[69992]: value = "task-2897856" [ 1331.420767] env[69992]: _type = "Task" [ 1331.420767] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.431305] env[69992]: DEBUG oslo_vmware.api [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897856, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.588223] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4d9f0595-7250-4163-ad26-e93a341f8c44 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "e95e47c2-d82e-4153-8d16-7b65d992e91a" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.789s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1331.651602] env[69992]: DEBUG oslo_vmware.api [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897854, 'name': PowerOffVM_Task, 'duration_secs': 0.292317} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.651901] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1331.652200] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1331.652707] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b9036bd-00d9-4764-b4d4-5af10d9412c3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.704026] env[69992]: DEBUG nova.compute.manager [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1331.716478] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897855, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.746265] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1331.746265] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1331.746265] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Deleting the datastore file [datastore1] 31109fbd-ebc0-422d-a705-7d0e59d4bbb4 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1331.746265] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-48ee2697-12f7-49d5-998e-b1c64eb3a382 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.754268] env[69992]: DEBUG oslo_vmware.api [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1331.754268] env[69992]: value = "task-2897858" [ 1331.754268] env[69992]: _type = "Task" [ 1331.754268] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.764763] env[69992]: DEBUG oslo_vmware.api [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897858, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.828147] env[69992]: DEBUG nova.compute.utils [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1331.833714] env[69992]: INFO nova.compute.claims [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1331.840164] env[69992]: DEBUG nova.compute.manager [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1331.841888] env[69992]: DEBUG nova.network.neutron [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1331.900762] env[69992]: DEBUG nova.policy [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8589a47b616643f5a513f62354529eda', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57eaf44c4ac5491380b329e1e86e9454', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1331.934291] env[69992]: DEBUG oslo_vmware.api [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897856, 'name': ReconfigVM_Task, 'duration_secs': 0.263401} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.934291] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 2b89e218-81cc-49fc-a80a-35dde48bdd5d/2b89e218-81cc-49fc-a80a-35dde48bdd5d.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1331.934291] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7f7f996b-4f45-4115-96a2-596fae252942 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.938998] env[69992]: DEBUG oslo_vmware.api [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1331.938998] env[69992]: value = "task-2897859" [ 1331.938998] env[69992]: _type = "Task" [ 1331.938998] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.957228] env[69992]: DEBUG oslo_vmware.api [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897859, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.222516] env[69992]: DEBUG oslo_vmware.api [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897855, 'name': PowerOnVM_Task, 'duration_secs': 0.662672} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.225073] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1332.225073] env[69992]: INFO nova.compute.manager [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Took 8.21 seconds to spawn the instance on the hypervisor. [ 1332.225073] env[69992]: DEBUG nova.compute.manager [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1332.225073] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d742fd6-1f79-451d-83c5-a854b652cd66 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.252130] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1332.266886] env[69992]: DEBUG oslo_vmware.api [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897858, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.270727} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.267506] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1332.267765] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1332.267915] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1332.268348] env[69992]: INFO nova.compute.manager [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1332.268439] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1332.268693] env[69992]: DEBUG nova.compute.manager [-] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1332.269270] env[69992]: DEBUG nova.network.neutron [-] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1332.341697] env[69992]: DEBUG nova.compute.manager [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1332.345611] env[69992]: INFO nova.compute.resource_tracker [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Updating resource usage from migration fdae9135-789e-4e36-84ab-893429246875 [ 1332.456768] env[69992]: DEBUG oslo_vmware.api [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897859, 'name': Rename_Task, 'duration_secs': 0.159628} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.457113] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1332.457478] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-35d0a955-50c2-4d36-ab85-08b50aeec44b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.464968] env[69992]: DEBUG oslo_vmware.api [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1332.464968] env[69992]: value = "task-2897860" [ 1332.464968] env[69992]: _type = "Task" [ 1332.464968] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.479622] env[69992]: DEBUG nova.network.neutron [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Successfully created port: f0f66e20-bfab-46ab-a70b-a4a982f63954 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1332.488095] env[69992]: DEBUG oslo_vmware.api [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897860, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.734431] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-678e5ebf-a618-4ae4-a5da-6432211a9d30 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.750070] env[69992]: INFO nova.compute.manager [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Took 22.61 seconds to build instance. 
[ 1332.751991] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b19907fb-08a2-4568-ad6f-5adf88faa445 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.757934] env[69992]: DEBUG nova.compute.manager [req-03bde51e-7fde-4b78-9a13-f19998320cee req-3a4aae08-a337-4245-b816-80f8b7e1cda8 service nova] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Received event network-vif-deleted-15455be6-d2df-46a9-bd15-7872eadb1ab6 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1332.758289] env[69992]: INFO nova.compute.manager [req-03bde51e-7fde-4b78-9a13-f19998320cee req-3a4aae08-a337-4245-b816-80f8b7e1cda8 service nova] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Neutron deleted interface 15455be6-d2df-46a9-bd15-7872eadb1ab6; detaching it from the instance and deleting it from the info cache [ 1332.758563] env[69992]: DEBUG nova.network.neutron [req-03bde51e-7fde-4b78-9a13-f19998320cee req-3a4aae08-a337-4245-b816-80f8b7e1cda8 service nova] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1332.791136] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f94fe5ce-1855-402a-b440-aec89fcf3964 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.799197] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475a9c57-5152-4098-befa-eccda8a384db {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.805683] env[69992]: DEBUG oslo_concurrency.lockutils [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquiring lock "48558980-2800-4f5b-80ce-d59552445c3f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1332.806038] env[69992]: DEBUG oslo_concurrency.lockutils [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "48558980-2800-4f5b-80ce-d59552445c3f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1332.806329] env[69992]: DEBUG oslo_concurrency.lockutils [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquiring lock "48558980-2800-4f5b-80ce-d59552445c3f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1332.806610] env[69992]: DEBUG oslo_concurrency.lockutils [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "48558980-2800-4f5b-80ce-d59552445c3f-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1332.806869] env[69992]: DEBUG oslo_concurrency.lockutils [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "48558980-2800-4f5b-80ce-d59552445c3f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1332.816685] env[69992]: DEBUG nova.compute.provider_tree [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1332.818500] env[69992]: INFO nova.compute.manager [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Terminating instance [ 1332.979717] env[69992]: DEBUG oslo_vmware.api [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897860, 'name': PowerOnVM_Task, 'duration_secs': 0.471136} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.980087] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1332.980394] env[69992]: INFO nova.compute.manager [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Took 6.52 seconds to spawn the instance on the hypervisor. 
[ 1332.980591] env[69992]: DEBUG nova.compute.manager [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1332.981541] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6fb1e29-9844-4094-9bb9-d9c8c304772c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.140707] env[69992]: DEBUG oslo_concurrency.lockutils [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "5c8b5f76-918a-44ac-b5b4-5f5f252da936" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1333.143017] env[69992]: DEBUG oslo_concurrency.lockutils [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "5c8b5f76-918a-44ac-b5b4-5f5f252da936" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1333.158958] env[69992]: DEBUG nova.network.neutron [-] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1333.257409] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fd3314d3-9a8a-4fe4-a3d0-5165a0836e27 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "9464339a-b760-47e9-bc75-e88ce18bf71b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.130s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1333.262906] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-98cf808e-b6a4-4085-8c7f-6eb0092fa312 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.274665] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ba9bf4-041d-4436-8360-c7406693d9b6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.312121] env[69992]: DEBUG nova.compute.manager [req-03bde51e-7fde-4b78-9a13-f19998320cee req-3a4aae08-a337-4245-b816-80f8b7e1cda8 service nova] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Detach interface failed, port_id=15455be6-d2df-46a9-bd15-7872eadb1ab6, reason: Instance 31109fbd-ebc0-422d-a705-7d0e59d4bbb4 could not be found. 
{{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1333.321523] env[69992]: DEBUG nova.scheduler.client.report [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1333.325476] env[69992]: DEBUG nova.compute.manager [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1333.325687] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1333.326721] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3358bbb6-14c9-46eb-a5a5-9c49c81f3777 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.336106] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1333.336394] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3e3b20ab-c97e-4095-b13c-8c04efeaaee6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.343811] env[69992]: DEBUG oslo_vmware.api [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1333.343811] env[69992]: value = "task-2897861" [ 1333.343811] env[69992]: _type = "Task" [ 1333.343811] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.351844] env[69992]: DEBUG oslo_vmware.api [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897861, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.356100] env[69992]: DEBUG nova.compute.manager [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1333.391348] env[69992]: DEBUG nova.virt.hardware [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1333.391606] env[69992]: DEBUG nova.virt.hardware [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1333.391763] env[69992]: DEBUG nova.virt.hardware [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1333.391965] env[69992]: DEBUG nova.virt.hardware [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1333.392377] env[69992]: DEBUG nova.virt.hardware [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1333.392651] env[69992]: DEBUG nova.virt.hardware [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1333.392965] env[69992]: DEBUG nova.virt.hardware [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1333.393216] env[69992]: DEBUG nova.virt.hardware [None 
req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1333.393486] env[69992]: DEBUG nova.virt.hardware [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1333.393752] env[69992]: DEBUG nova.virt.hardware [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1333.394048] env[69992]: DEBUG nova.virt.hardware [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1333.394993] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-943a5445-df6b-422a-b1d6-8130604b3a5f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.403864] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c0e010-1804-4bb1-b112-aac0760157d8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.501240] env[69992]: INFO nova.compute.manager [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Took 22.34 seconds to build instance. [ 1333.643687] env[69992]: DEBUG nova.compute.manager [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1333.663454] env[69992]: INFO nova.compute.manager [-] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Took 1.39 seconds to deallocate network for instance. 
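[annotation -- illustrative aside, not part of the captured log] The nova.virt.hardware records just above walk through CPU topology selection for the m1.nano flavor: with 1 vCPU and no flavor or image limits, the maxima default to 65536 sockets/cores/threads, exactly one candidate topology exists, and VirtCPUTopology(cores=1,sockets=1,threads=1) is chosen. The snippet below is a simplified, stand-alone enumeration of that candidate-generation step; it is not Nova's _get_possible_cpu_topologies implementation, and the helper name is an assumption for illustration only.

    def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                                max_threads=65536):
        """Enumerate (sockets, cores, threads) triples whose product equals
        the vCPU count and which respect the per-dimension maxima -- a
        simplified stand-in for the candidate generation logged above."""
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    found.append((sockets, cores, threads))
        return found

    # For the 1-vCPU flavor this yields a single candidate, matching the
    # "Got 1 possible topologies" record above.
    print(possible_cpu_topologies(1))   # [(1, 1, 1)]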
[ 1333.827836] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.504s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1333.828075] env[69992]: INFO nova.compute.manager [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Migrating [ 1333.836728] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.479s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1333.840038] env[69992]: DEBUG nova.objects.instance [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lazy-loading 'resources' on Instance uuid eec50935-f553-43c7-b67b-7289299745bd {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1333.872774] env[69992]: DEBUG oslo_vmware.api [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897861, 'name': PowerOffVM_Task, 'duration_secs': 0.257889} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.873108] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1333.873294] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1333.873888] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aaa2f9db-123e-48b5-ae21-0239f465668c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.941692] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1333.941936] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1333.942297] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Deleting the datastore file [datastore2] 48558980-2800-4f5b-80ce-d59552445c3f {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1333.942659] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d2e35899-11ec-4e8b-a665-601a3f2279af {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.948988] env[69992]: DEBUG oslo_vmware.api [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1333.948988] env[69992]: value = "task-2897863" [ 1333.948988] env[69992]: _type = "Task" [ 1333.948988] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.957094] env[69992]: DEBUG oslo_vmware.api [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897863, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.002733] env[69992]: DEBUG oslo_concurrency.lockutils [None req-274c8671-dea7-4d07-85bf-98f1dc229ab6 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "2b89e218-81cc-49fc-a80a-35dde48bdd5d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.854s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1334.151903] env[69992]: DEBUG nova.compute.manager [req-071bae4e-ce12-4e48-8cfe-5264ec08d9ce req-2f3d25ca-69ca-474b-857e-3946e3308835 service nova] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Received event network-changed-1f44518f-713e-4671-bc22-96c67ac28c8e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1334.152087] env[69992]: DEBUG nova.compute.manager [req-071bae4e-ce12-4e48-8cfe-5264ec08d9ce req-2f3d25ca-69ca-474b-857e-3946e3308835 service nova] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Refreshing instance network info cache due to event network-changed-1f44518f-713e-4671-bc22-96c67ac28c8e. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1334.152306] env[69992]: DEBUG oslo_concurrency.lockutils [req-071bae4e-ce12-4e48-8cfe-5264ec08d9ce req-2f3d25ca-69ca-474b-857e-3946e3308835 service nova] Acquiring lock "refresh_cache-9464339a-b760-47e9-bc75-e88ce18bf71b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1334.152446] env[69992]: DEBUG oslo_concurrency.lockutils [req-071bae4e-ce12-4e48-8cfe-5264ec08d9ce req-2f3d25ca-69ca-474b-857e-3946e3308835 service nova] Acquired lock "refresh_cache-9464339a-b760-47e9-bc75-e88ce18bf71b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1334.152631] env[69992]: DEBUG nova.network.neutron [req-071bae4e-ce12-4e48-8cfe-5264ec08d9ce req-2f3d25ca-69ca-474b-857e-3946e3308835 service nova] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Refreshing network info cache for port 1f44518f-713e-4671-bc22-96c67ac28c8e {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1334.170747] env[69992]: DEBUG oslo_concurrency.lockutils [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1334.172857] env[69992]: DEBUG oslo_concurrency.lockutils [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1334.354704] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "refresh_cache-1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" {{(pid=69992) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1334.355017] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired lock "refresh_cache-1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1334.355164] env[69992]: DEBUG nova.network.neutron [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1334.410356] env[69992]: DEBUG nova.network.neutron [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Successfully updated port: f0f66e20-bfab-46ab-a70b-a4a982f63954 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1334.462732] env[69992]: DEBUG oslo_vmware.api [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897863, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.462274} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.463018] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1334.463214] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1334.463395] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1334.463971] env[69992]: INFO nova.compute.manager [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1334.463971] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1334.464106] env[69992]: DEBUG nova.compute.manager [-] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1334.464138] env[69992]: DEBUG nova.network.neutron [-] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1334.715031] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf26fd7c-10d4-432e-9673-53fb57aff528 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.722586] env[69992]: DEBUG oslo_concurrency.lockutils [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "2b89e218-81cc-49fc-a80a-35dde48bdd5d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1334.722849] env[69992]: DEBUG oslo_concurrency.lockutils [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "2b89e218-81cc-49fc-a80a-35dde48bdd5d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1334.723920] env[69992]: DEBUG oslo_concurrency.lockutils [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "2b89e218-81cc-49fc-a80a-35dde48bdd5d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1334.723920] env[69992]: DEBUG oslo_concurrency.lockutils [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "2b89e218-81cc-49fc-a80a-35dde48bdd5d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1334.723920] env[69992]: DEBUG oslo_concurrency.lockutils [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "2b89e218-81cc-49fc-a80a-35dde48bdd5d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1334.725574] env[69992]: INFO nova.compute.manager [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Terminating instance [ 1334.727662] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2d8c2977-a1f4-4886-b05a-e4ed23fa64a2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.766651] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81b3b81-31bf-4196-bbdd-9c8c94633e69 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.775718] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2a32b3-6cfb-46b6-8048-7173c401ffda {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.793390] env[69992]: DEBUG nova.compute.provider_tree [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1334.899939] env[69992]: DEBUG nova.network.neutron [req-071bae4e-ce12-4e48-8cfe-5264ec08d9ce req-2f3d25ca-69ca-474b-857e-3946e3308835 service nova] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Updated VIF entry in instance network info cache for port 1f44518f-713e-4671-bc22-96c67ac28c8e. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1334.900316] env[69992]: DEBUG nova.network.neutron [req-071bae4e-ce12-4e48-8cfe-5264ec08d9ce req-2f3d25ca-69ca-474b-857e-3946e3308835 service nova] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Updating instance_info_cache with network_info: [{"id": "1f44518f-713e-4671-bc22-96c67ac28c8e", "address": "fa:16:3e:ae:3c:2e", "network": {"id": "107a28a2-7647-4953-abac-1246b892511f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1382825212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ab89c6cf054418a4dd1a0e61b3a5e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f44518f-71", "ovs_interfaceid": "1f44518f-713e-4671-bc22-96c67ac28c8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1334.913969] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "refresh_cache-cc8a809a-1a3b-4dad-a74b-d2f8d267b476" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1334.914142] env[69992]: DEBUG oslo_concurrency.lockutils [None 
req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired lock "refresh_cache-cc8a809a-1a3b-4dad-a74b-d2f8d267b476" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1334.914294] env[69992]: DEBUG nova.network.neutron [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1335.159927] env[69992]: DEBUG nova.network.neutron [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Updating instance_info_cache with network_info: [{"id": "2c7ae122-41e5-4605-a33e-4516dd1f5945", "address": "fa:16:3e:41:be:e4", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c7ae122-41", "ovs_interfaceid": "2c7ae122-41e5-4605-a33e-4516dd1f5945", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.211611] env[69992]: DEBUG nova.network.neutron [-] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.236153] env[69992]: DEBUG nova.compute.manager [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1335.236457] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1335.237829] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9613875a-c7cd-48b4-a7fb-c239b63ab670 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.246361] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1335.246647] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5df3dba6-210c-42dd-8d54-b34e2733c1d9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.255468] env[69992]: DEBUG oslo_vmware.api [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1335.255468] env[69992]: value = "task-2897864" [ 1335.255468] env[69992]: _type = "Task" [ 1335.255468] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.266271] env[69992]: DEBUG oslo_vmware.api [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897864, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.296953] env[69992]: DEBUG nova.scheduler.client.report [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1335.402659] env[69992]: DEBUG oslo_concurrency.lockutils [req-071bae4e-ce12-4e48-8cfe-5264ec08d9ce req-2f3d25ca-69ca-474b-857e-3946e3308835 service nova] Releasing lock "refresh_cache-9464339a-b760-47e9-bc75-e88ce18bf71b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1335.460353] env[69992]: DEBUG nova.network.neutron [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1335.666042] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Releasing lock "refresh_cache-1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1335.711121] env[69992]: DEBUG nova.network.neutron [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Updating instance_info_cache with network_info: [{"id": "f0f66e20-bfab-46ab-a70b-a4a982f63954", "address": "fa:16:3e:a9:3b:ce", "network": {"id": "8276eb4a-aa4d-463a-beae-6aab3fe1a5ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-494735806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57eaf44c4ac5491380b329e1e86e9454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0f66e20-bf", "ovs_interfaceid": "f0f66e20-bfab-46ab-a70b-a4a982f63954", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.714240] env[69992]: INFO nova.compute.manager [-] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Took 1.25 seconds to deallocate network for instance. [ 1335.767373] env[69992]: DEBUG oslo_vmware.api [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897864, 'name': PowerOffVM_Task, 'duration_secs': 0.284947} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.769926] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1335.769926] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1335.769926] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7732465e-f4b8-41ad-a65b-f4f829c3349d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.801872] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.965s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1335.804890] env[69992]: DEBUG oslo_concurrency.lockutils [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.808s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.806446] env[69992]: INFO nova.compute.claims [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1335.826320] env[69992]: INFO nova.scheduler.client.report [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Deleted allocations for instance eec50935-f553-43c7-b67b-7289299745bd [ 1335.842429] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1335.842677] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1335.842856] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 
tempest-ServersNegativeTestJSON-1203889377-project-member] Deleting the datastore file [datastore2] 2b89e218-81cc-49fc-a80a-35dde48bdd5d {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1335.843193] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-903ddb36-b4eb-4205-8f3f-22820f16a202 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.852549] env[69992]: DEBUG oslo_vmware.api [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1335.852549] env[69992]: value = "task-2897866" [ 1335.852549] env[69992]: _type = "Task" [ 1335.852549] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.860864] env[69992]: DEBUG oslo_vmware.api [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897866, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.178606] env[69992]: DEBUG nova.compute.manager [req-be733357-088c-4054-81e9-3f4eee0d1aad req-c3160a62-b872-47de-9a56-3457f3da4652 service nova] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Received event network-vif-plugged-f0f66e20-bfab-46ab-a70b-a4a982f63954 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1336.178824] env[69992]: DEBUG oslo_concurrency.lockutils [req-be733357-088c-4054-81e9-3f4eee0d1aad req-c3160a62-b872-47de-9a56-3457f3da4652 service nova] Acquiring lock "cc8a809a-1a3b-4dad-a74b-d2f8d267b476-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1336.180203] env[69992]: DEBUG oslo_concurrency.lockutils [req-be733357-088c-4054-81e9-3f4eee0d1aad req-c3160a62-b872-47de-9a56-3457f3da4652 service nova] Lock "cc8a809a-1a3b-4dad-a74b-d2f8d267b476-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1336.180203] env[69992]: DEBUG oslo_concurrency.lockutils [req-be733357-088c-4054-81e9-3f4eee0d1aad req-c3160a62-b872-47de-9a56-3457f3da4652 service nova] Lock "cc8a809a-1a3b-4dad-a74b-d2f8d267b476-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1336.180203] env[69992]: DEBUG nova.compute.manager [req-be733357-088c-4054-81e9-3f4eee0d1aad req-c3160a62-b872-47de-9a56-3457f3da4652 service nova] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] No waiting events found dispatching network-vif-plugged-f0f66e20-bfab-46ab-a70b-a4a982f63954 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1336.180203] env[69992]: WARNING nova.compute.manager [req-be733357-088c-4054-81e9-3f4eee0d1aad req-c3160a62-b872-47de-9a56-3457f3da4652 service nova] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Received unexpected event 
network-vif-plugged-f0f66e20-bfab-46ab-a70b-a4a982f63954 for instance with vm_state building and task_state spawning. [ 1336.180203] env[69992]: DEBUG nova.compute.manager [req-be733357-088c-4054-81e9-3f4eee0d1aad req-c3160a62-b872-47de-9a56-3457f3da4652 service nova] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Received event network-changed-f0f66e20-bfab-46ab-a70b-a4a982f63954 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1336.180203] env[69992]: DEBUG nova.compute.manager [req-be733357-088c-4054-81e9-3f4eee0d1aad req-c3160a62-b872-47de-9a56-3457f3da4652 service nova] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Refreshing instance network info cache due to event network-changed-f0f66e20-bfab-46ab-a70b-a4a982f63954. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1336.180203] env[69992]: DEBUG oslo_concurrency.lockutils [req-be733357-088c-4054-81e9-3f4eee0d1aad req-c3160a62-b872-47de-9a56-3457f3da4652 service nova] Acquiring lock "refresh_cache-cc8a809a-1a3b-4dad-a74b-d2f8d267b476" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.213186] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Releasing lock "refresh_cache-cc8a809a-1a3b-4dad-a74b-d2f8d267b476" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1336.213632] env[69992]: DEBUG nova.compute.manager [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Instance network_info: |[{"id": "f0f66e20-bfab-46ab-a70b-a4a982f63954", "address": "fa:16:3e:a9:3b:ce", "network": {"id": "8276eb4a-aa4d-463a-beae-6aab3fe1a5ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-494735806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57eaf44c4ac5491380b329e1e86e9454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0f66e20-bf", "ovs_interfaceid": "f0f66e20-bfab-46ab-a70b-a4a982f63954", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1336.214275] env[69992]: DEBUG oslo_concurrency.lockutils [req-be733357-088c-4054-81e9-3f4eee0d1aad req-c3160a62-b872-47de-9a56-3457f3da4652 service nova] Acquired lock "refresh_cache-cc8a809a-1a3b-4dad-a74b-d2f8d267b476" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1336.214877] env[69992]: DEBUG nova.network.neutron [req-be733357-088c-4054-81e9-3f4eee0d1aad req-c3160a62-b872-47de-9a56-3457f3da4652 service 
nova] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Refreshing network info cache for port f0f66e20-bfab-46ab-a70b-a4a982f63954 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1336.216326] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:3b:ce', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73b1ea51-8078-4169-921e-d5a224120ab4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f0f66e20-bfab-46ab-a70b-a4a982f63954', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1336.226852] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1336.228614] env[69992]: DEBUG oslo_concurrency.lockutils [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1336.228862] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1336.229434] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-75c1867a-60a9-4401-a19f-211f97140386 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.252282] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1336.252282] env[69992]: value = "task-2897867" [ 1336.252282] env[69992]: _type = "Task" [ 1336.252282] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.261454] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897867, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.333906] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b8e5d949-9c3b-43c0-b871-e3da964bff95 tempest-ServerRescueNegativeTestJSON-2055163366 tempest-ServerRescueNegativeTestJSON-2055163366-project-member] Lock "eec50935-f553-43c7-b67b-7289299745bd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.714s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1336.363310] env[69992]: DEBUG oslo_vmware.api [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897866, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.453655} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.363630] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1336.363822] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1336.363999] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1336.364186] env[69992]: INFO nova.compute.manager [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1336.364427] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1336.364612] env[69992]: DEBUG nova.compute.manager [-] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1336.364711] env[69992]: DEBUG nova.network.neutron [-] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1336.762812] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897867, 'name': CreateVM_Task, 'duration_secs': 0.332283} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.762812] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1336.763616] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.763697] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1336.763950] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1336.764225] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4becf14e-13c5-4f48-9e6c-829278a5b42c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.770927] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1336.770927] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52fed501-8faf-4d60-82f2-5192b4b8ce7a" [ 1336.770927] env[69992]: _type = "Task" [ 1336.770927] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.780389] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52fed501-8faf-4d60-82f2-5192b4b8ce7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.023960] env[69992]: DEBUG nova.network.neutron [req-be733357-088c-4054-81e9-3f4eee0d1aad req-c3160a62-b872-47de-9a56-3457f3da4652 service nova] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Updated VIF entry in instance network info cache for port f0f66e20-bfab-46ab-a70b-a4a982f63954. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1337.024335] env[69992]: DEBUG nova.network.neutron [req-be733357-088c-4054-81e9-3f4eee0d1aad req-c3160a62-b872-47de-9a56-3457f3da4652 service nova] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Updating instance_info_cache with network_info: [{"id": "f0f66e20-bfab-46ab-a70b-a4a982f63954", "address": "fa:16:3e:a9:3b:ce", "network": {"id": "8276eb4a-aa4d-463a-beae-6aab3fe1a5ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-494735806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57eaf44c4ac5491380b329e1e86e9454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0f66e20-bf", "ovs_interfaceid": "f0f66e20-bfab-46ab-a70b-a4a982f63954", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1337.090976] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4834269-43a9-4b5e-8722-fbf174145d94 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.098747] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80c025b6-52b7-411a-b6a4-b7e6f7ead9d8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.133881] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff75d70-a0d9-43a0-a93a-c067473dd6be {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.140902] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53513262-50be-4d41-93b3-9d0f717066f1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.144937] env[69992]: DEBUG nova.network.neutron [-] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1337.170214] env[69992]: DEBUG nova.compute.provider_tree [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1337.183385] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43974420-0b78-4371-a25f-486259fc5b4b {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.207025] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Updating instance '1f9f3bdf-c806-4ac9-85f3-6b33b983fafe' progress to 0 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1337.283877] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52fed501-8faf-4d60-82f2-5192b4b8ce7a, 'name': SearchDatastore_Task, 'duration_secs': 0.026047} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.284555] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1337.284864] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1337.285125] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1337.285281] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1337.285430] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1337.285966] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cf0e0710-653d-4a0d-9220-2236d5524964 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.295111] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1337.295312] env[69992]: 
DEBUG nova.virt.vmwareapi.vmops [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1337.296546] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22923545-6010-4c4a-9bbc-e005c992f29e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.302195] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1337.302195] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52c4eb53-b8d5-72e6-01d8-30b45cacdd8d" [ 1337.302195] env[69992]: _type = "Task" [ 1337.302195] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.310399] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c4eb53-b8d5-72e6-01d8-30b45cacdd8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.527074] env[69992]: DEBUG oslo_concurrency.lockutils [req-be733357-088c-4054-81e9-3f4eee0d1aad req-c3160a62-b872-47de-9a56-3457f3da4652 service nova] Releasing lock "refresh_cache-cc8a809a-1a3b-4dad-a74b-d2f8d267b476" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1337.527383] env[69992]: DEBUG nova.compute.manager [req-be733357-088c-4054-81e9-3f4eee0d1aad req-c3160a62-b872-47de-9a56-3457f3da4652 service nova] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Received event network-vif-deleted-a4e3b7ab-de40-43e1-b9e2-222d0126cf7a {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1337.650596] env[69992]: INFO nova.compute.manager [-] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Took 1.29 seconds to deallocate network for instance. 
[ 1337.674082] env[69992]: DEBUG nova.scheduler.client.report [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1337.713094] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1337.713247] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5635f854-8589-46b4-b68e-d1c572f59fef {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.722275] env[69992]: DEBUG oslo_vmware.api [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1337.722275] env[69992]: value = "task-2897868" [ 1337.722275] env[69992]: _type = "Task" [ 1337.722275] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.730695] env[69992]: DEBUG oslo_vmware.api [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897868, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.815377] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c4eb53-b8d5-72e6-01d8-30b45cacdd8d, 'name': SearchDatastore_Task, 'duration_secs': 0.01891} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.815377] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-577247ef-37a8-4d08-8056-f4b1d69616fd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.822180] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1337.822180] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5266e7fa-d587-77dd-f6e3-2c82e5804253" [ 1337.822180] env[69992]: _type = "Task" [ 1337.822180] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.830571] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5266e7fa-d587-77dd-f6e3-2c82e5804253, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.158614] env[69992]: DEBUG oslo_concurrency.lockutils [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1338.182044] env[69992]: DEBUG oslo_concurrency.lockutils [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.377s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1338.182682] env[69992]: DEBUG nova.compute.manager [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1338.186374] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.596s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1338.188220] env[69992]: INFO nova.compute.claims [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1338.218284] env[69992]: DEBUG nova.compute.manager [req-b033d6a3-0c3a-44ea-808b-d717859eaccf req-717e9bcc-af6e-4786-be42-1999abc83414 service nova] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Received event network-vif-deleted-fc9715b8-215a-4627-ab81-65fa9760790c {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1338.233111] env[69992]: DEBUG oslo_vmware.api [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897868, 'name': PowerOffVM_Task, 'duration_secs': 0.288301} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.233111] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1338.233282] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Updating instance '1f9f3bdf-c806-4ac9-85f3-6b33b983fafe' progress to 17 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1338.332870] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5266e7fa-d587-77dd-f6e3-2c82e5804253, 'name': SearchDatastore_Task, 'duration_secs': 0.012714} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.333139] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1338.333451] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] cc8a809a-1a3b-4dad-a74b-d2f8d267b476/cc8a809a-1a3b-4dad-a74b-d2f8d267b476.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1338.333779] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-76bcbcb4-e044-4195-81e8-89e73bc07679 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.341970] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1338.341970] env[69992]: value = "task-2897869" [ 1338.341970] env[69992]: _type = "Task" [ 1338.341970] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.351040] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897869, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.688704] env[69992]: DEBUG nova.compute.utils [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1338.690475] env[69992]: DEBUG nova.compute.manager [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1338.691082] env[69992]: DEBUG nova.network.neutron [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1338.741328] env[69992]: DEBUG nova.virt.hardware [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1338.741613] env[69992]: DEBUG nova.virt.hardware [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1338.741807] env[69992]: DEBUG nova.virt.hardware [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1338.742049] env[69992]: DEBUG nova.virt.hardware [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1338.742245] env[69992]: DEBUG nova.virt.hardware [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1338.742400] env[69992]: DEBUG nova.virt.hardware [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1338.742619] env[69992]: DEBUG nova.virt.hardware [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1338.742817] env[69992]: DEBUG nova.virt.hardware [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1338.742988] env[69992]: DEBUG nova.virt.hardware [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1338.743173] env[69992]: DEBUG nova.virt.hardware [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1338.743352] env[69992]: DEBUG nova.virt.hardware [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1338.751272] env[69992]: DEBUG nova.policy [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ede57670ddc4434a9ba4745870ddfa14', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53308426a9c44f46b78a155e612ee5a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1338.752891] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2da3493d-0e35-4306-97e8-8bc063931c69 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.775431] env[69992]: DEBUG oslo_vmware.api [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1338.775431] env[69992]: value = "task-2897870" [ 1338.775431] env[69992]: _type = "Task" [ 1338.775431] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.786264] env[69992]: DEBUG oslo_vmware.api [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897870, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.853385] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897869, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.192725] env[69992]: DEBUG nova.network.neutron [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Successfully created port: 1af32739-5591-409e-8d79-66f78e068c14 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1339.195236] env[69992]: DEBUG nova.compute.manager [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1339.288269] env[69992]: DEBUG oslo_vmware.api [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897870, 'name': ReconfigVM_Task, 'duration_secs': 0.322537} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.291028] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Updating instance '1f9f3bdf-c806-4ac9-85f3-6b33b983fafe' progress to 33 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1339.359708] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897869, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.598623} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.359983] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] cc8a809a-1a3b-4dad-a74b-d2f8d267b476/cc8a809a-1a3b-4dad-a74b-d2f8d267b476.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1339.360167] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1339.360410] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-741fb1da-6655-4430-b17c-f9333493ff5b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.369112] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1339.369112] env[69992]: value = "task-2897871" [ 1339.369112] env[69992]: _type = "Task" [ 1339.369112] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.378583] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897871, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.530386] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed22896-6ea3-44c1-b536-4003335c7fea {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.538707] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-557da5f9-2b4a-4df0-b420-7b135a3acf47 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.572461] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d265299-55b7-47d1-954d-7d37772664aa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.580519] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3adcd49a-1dc7-4e94-94ce-b0e20bd16049 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.594786] env[69992]: DEBUG nova.compute.provider_tree [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1339.798097] env[69992]: DEBUG nova.virt.hardware [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1339.798342] env[69992]: DEBUG nova.virt.hardware [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1339.798516] env[69992]: DEBUG nova.virt.hardware [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1339.798705] env[69992]: DEBUG nova.virt.hardware [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1339.798852] env[69992]: DEBUG nova.virt.hardware [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 
tempest-ServerActionsTestOtherB-873622132-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1339.799013] env[69992]: DEBUG nova.virt.hardware [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1339.799235] env[69992]: DEBUG nova.virt.hardware [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1339.799394] env[69992]: DEBUG nova.virt.hardware [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1339.799593] env[69992]: DEBUG nova.virt.hardware [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1339.799786] env[69992]: DEBUG nova.virt.hardware [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1339.800178] env[69992]: DEBUG nova.virt.hardware [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1339.806376] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Reconfiguring VM instance instance-00000055 to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1339.807891] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b56dbad8-f8a9-4e4a-9978-10dc76507441 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.824341] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Acquiring lock "b72eb094-b0fa-4e6f-bc29-c110692c7204" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1339.824559] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cfc67c24-c851-4354-b240-99185a0a9fec 
tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Lock "b72eb094-b0fa-4e6f-bc29-c110692c7204" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1339.831643] env[69992]: DEBUG oslo_vmware.api [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1339.831643] env[69992]: value = "task-2897872" [ 1339.831643] env[69992]: _type = "Task" [ 1339.831643] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.843362] env[69992]: DEBUG oslo_vmware.api [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897872, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.879440] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897871, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06238} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.879825] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1339.880678] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36251b2a-2fd3-4b01-823e-41a46fbab543 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.905162] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] cc8a809a-1a3b-4dad-a74b-d2f8d267b476/cc8a809a-1a3b-4dad-a74b-d2f8d267b476.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1339.905500] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbc9bd8a-6d9d-4224-a024-95d2615cd123 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.927252] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1339.927252] env[69992]: value = "task-2897873" [ 1339.927252] env[69992]: _type = "Task" [ 1339.927252] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.935981] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897873, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.097805] env[69992]: DEBUG nova.scheduler.client.report [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1340.207160] env[69992]: DEBUG nova.compute.manager [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1340.236616] env[69992]: DEBUG nova.virt.hardware [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1340.237371] env[69992]: DEBUG nova.virt.hardware [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1340.237636] env[69992]: DEBUG nova.virt.hardware [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1340.237851] env[69992]: DEBUG nova.virt.hardware [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 
tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1340.238014] env[69992]: DEBUG nova.virt.hardware [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1340.238187] env[69992]: DEBUG nova.virt.hardware [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1340.238519] env[69992]: DEBUG nova.virt.hardware [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1340.238753] env[69992]: DEBUG nova.virt.hardware [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1340.239288] env[69992]: DEBUG nova.virt.hardware [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1340.239288] env[69992]: DEBUG nova.virt.hardware [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1340.239603] env[69992]: DEBUG nova.virt.hardware [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1340.240493] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-628c6770-a526-4afb-afbb-4a552c75cc9f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.251057] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da6436c-2a90-40bf-8fea-bea547b37c16 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.327241] env[69992]: DEBUG nova.compute.manager [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1340.341446] env[69992]: DEBUG oslo_vmware.api [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897872, 'name': ReconfigVM_Task, 'duration_secs': 0.301503} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.342027] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Reconfigured VM instance instance-00000055 to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1340.343053] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da23ae2-b703-404c-86c1-5897d2b827ec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.370245] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe/1f9f3bdf-c806-4ac9-85f3-6b33b983fafe.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1340.371172] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac79db80-c479-440a-9653-36cb3430fed4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.389858] env[69992]: DEBUG oslo_vmware.api [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1340.389858] env[69992]: value = "task-2897874" [ 1340.389858] env[69992]: _type = "Task" [ 1340.389858] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.400017] env[69992]: DEBUG oslo_vmware.api [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897874, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.437364] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897873, 'name': ReconfigVM_Task, 'duration_secs': 0.287245} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.437673] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Reconfigured VM instance instance-00000063 to attach disk [datastore1] cc8a809a-1a3b-4dad-a74b-d2f8d267b476/cc8a809a-1a3b-4dad-a74b-d2f8d267b476.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1340.438331] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a8d819a-7f5e-4fed-914d-4fabbe589b40 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.444018] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1340.444018] env[69992]: value = "task-2897875" [ 1340.444018] env[69992]: _type = "Task" [ 1340.444018] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.453426] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897875, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.603236] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.417s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1340.603852] env[69992]: DEBUG nova.compute.manager [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1340.606559] env[69992]: DEBUG oslo_concurrency.lockutils [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.684s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1340.606783] env[69992]: DEBUG nova.objects.instance [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Lazy-loading 'resources' on Instance uuid 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1340.790696] env[69992]: DEBUG nova.compute.manager [req-a0d69e8d-e8ed-4812-a683-933f5e3d0017 req-c3a2d285-9aa2-4252-8b9f-173d220c01e9 service nova] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Received event network-vif-plugged-1af32739-5591-409e-8d79-66f78e068c14 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1340.790950] env[69992]: DEBUG oslo_concurrency.lockutils [req-a0d69e8d-e8ed-4812-a683-933f5e3d0017 req-c3a2d285-9aa2-4252-8b9f-173d220c01e9 service nova] Acquiring lock "3f44442d-82b1-4669-8d65-0088d4a9babb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1340.791179] env[69992]: DEBUG oslo_concurrency.lockutils [req-a0d69e8d-e8ed-4812-a683-933f5e3d0017 req-c3a2d285-9aa2-4252-8b9f-173d220c01e9 service nova] Lock "3f44442d-82b1-4669-8d65-0088d4a9babb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1340.791432] env[69992]: DEBUG oslo_concurrency.lockutils [req-a0d69e8d-e8ed-4812-a683-933f5e3d0017 req-c3a2d285-9aa2-4252-8b9f-173d220c01e9 service nova] Lock "3f44442d-82b1-4669-8d65-0088d4a9babb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1340.791606] env[69992]: DEBUG nova.compute.manager [req-a0d69e8d-e8ed-4812-a683-933f5e3d0017 req-c3a2d285-9aa2-4252-8b9f-173d220c01e9 service nova] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] No waiting events found dispatching network-vif-plugged-1af32739-5591-409e-8d79-66f78e068c14 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1340.791795] env[69992]: WARNING nova.compute.manager [req-a0d69e8d-e8ed-4812-a683-933f5e3d0017 req-c3a2d285-9aa2-4252-8b9f-173d220c01e9 service nova] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Received unexpected event network-vif-plugged-1af32739-5591-409e-8d79-66f78e068c14 for instance with vm_state building and task_state spawning. 
[ 1340.851879] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1340.899274] env[69992]: DEBUG oslo_vmware.api [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897874, 'name': ReconfigVM_Task, 'duration_secs': 0.387959} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.899612] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Reconfigured VM instance instance-00000055 to attach disk [datastore2] 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe/1f9f3bdf-c806-4ac9-85f3-6b33b983fafe.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1340.899845] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Updating instance '1f9f3bdf-c806-4ac9-85f3-6b33b983fafe' progress to 50 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1340.954681] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897875, 'name': Rename_Task, 'duration_secs': 0.218721} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.954681] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1340.954907] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-904d8742-7fd4-46b3-bc9f-7197c757bd10 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.961055] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1340.961055] env[69992]: value = "task-2897876" [ 1340.961055] env[69992]: _type = "Task" [ 1340.961055] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.969077] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897876, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.110912] env[69992]: DEBUG nova.compute.utils [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1341.116587] env[69992]: DEBUG nova.compute.manager [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1341.116786] env[69992]: DEBUG nova.network.neutron [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1341.154492] env[69992]: DEBUG nova.policy [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd0f7a6e9a76342a1a4fd39a8b21a31d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dc6fa4e45f4c47c49d67e6efe2eb7a50', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1341.290332] env[69992]: DEBUG nova.network.neutron [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Successfully updated port: 1af32739-5591-409e-8d79-66f78e068c14 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1341.320163] env[69992]: DEBUG nova.compute.manager [req-c4e464f9-4d8d-4cb7-bfdc-8477a8e48aa5 req-3234f97a-cfe4-40a0-b58b-70931d19a165 service nova] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Received event network-changed-1af32739-5591-409e-8d79-66f78e068c14 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1341.320595] env[69992]: DEBUG nova.compute.manager [req-c4e464f9-4d8d-4cb7-bfdc-8477a8e48aa5 req-3234f97a-cfe4-40a0-b58b-70931d19a165 service nova] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Refreshing instance network info cache due to event network-changed-1af32739-5591-409e-8d79-66f78e068c14. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1341.320595] env[69992]: DEBUG oslo_concurrency.lockutils [req-c4e464f9-4d8d-4cb7-bfdc-8477a8e48aa5 req-3234f97a-cfe4-40a0-b58b-70931d19a165 service nova] Acquiring lock "refresh_cache-3f44442d-82b1-4669-8d65-0088d4a9babb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1341.320755] env[69992]: DEBUG oslo_concurrency.lockutils [req-c4e464f9-4d8d-4cb7-bfdc-8477a8e48aa5 req-3234f97a-cfe4-40a0-b58b-70931d19a165 service nova] Acquired lock "refresh_cache-3f44442d-82b1-4669-8d65-0088d4a9babb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1341.320859] env[69992]: DEBUG nova.network.neutron [req-c4e464f9-4d8d-4cb7-bfdc-8477a8e48aa5 req-3234f97a-cfe4-40a0-b58b-70931d19a165 service nova] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Refreshing network info cache for port 1af32739-5591-409e-8d79-66f78e068c14 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1341.408659] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c98e35-2382-4958-bfd5-9d266d23b19b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.435384] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-177ba251-54e1-4e66-8540-918cbfdd6268 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.440251] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80fac111-475d-4767-b4d2-1d7e8ff38cf8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.460341] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Updating instance '1f9f3bdf-c806-4ac9-85f3-6b33b983fafe' progress to 67 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1341.464246] env[69992]: DEBUG nova.network.neutron [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Successfully created port: c35bf17a-173c-4013-b8e4-85b2415e8860 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1341.466935] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d940ab4-3d22-4151-bc83-a41f77371104 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.479385] env[69992]: DEBUG oslo_vmware.api [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897876, 'name': PowerOnVM_Task, 'duration_secs': 0.443182} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.503659] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1341.503887] env[69992]: INFO nova.compute.manager [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Took 8.15 seconds to spawn the instance on the hypervisor. [ 1341.504298] env[69992]: DEBUG nova.compute.manager [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1341.505728] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9409f1c2-5862-4313-aa0c-b7d62cb9d9fa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.508765] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de98570f-cbdd-4d37-bede-0d89d6138c77 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.521803] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64bf0832-c702-49be-9389-4b6fa3ee59b7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.536920] env[69992]: DEBUG nova.compute.provider_tree [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1341.617480] env[69992]: DEBUG nova.compute.manager [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1341.795154] env[69992]: DEBUG oslo_concurrency.lockutils [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "refresh_cache-3f44442d-82b1-4669-8d65-0088d4a9babb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1341.885557] env[69992]: DEBUG nova.network.neutron [req-c4e464f9-4d8d-4cb7-bfdc-8477a8e48aa5 req-3234f97a-cfe4-40a0-b58b-70931d19a165 service nova] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1341.908525] env[69992]: DEBUG oslo_vmware.rw_handles [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528cfd47-d555-6628-9748-432896e9c3aa/disk-0.vmdk. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1341.909446] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76c3906a-ed7d-44b6-8b97-fd078acb65ac {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.916378] env[69992]: DEBUG oslo_vmware.rw_handles [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528cfd47-d555-6628-9748-432896e9c3aa/disk-0.vmdk is in state: ready. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1341.916607] env[69992]: ERROR oslo_vmware.rw_handles [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528cfd47-d555-6628-9748-432896e9c3aa/disk-0.vmdk due to incomplete transfer. [ 1341.916767] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-46f96ef6-285d-44b3-8095-879a71052629 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.924230] env[69992]: DEBUG oslo_vmware.rw_handles [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528cfd47-d555-6628-9748-432896e9c3aa/disk-0.vmdk. 
{{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1341.924422] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Uploaded image 2afccf79-1e06-45e0-bd6d-e1bf4c00e288 to the Glance image server {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1341.926831] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Destroying the VM {{(pid=69992) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1341.929253] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f920ff82-5be4-48d5-a494-a5587b20ba60 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.935158] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1341.935158] env[69992]: value = "task-2897877" [ 1341.935158] env[69992]: _type = "Task" [ 1341.935158] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.947164] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897877, 'name': Destroy_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.028851] env[69992]: INFO nova.compute.manager [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Took 26.74 seconds to build instance. 
[ 1342.032319] env[69992]: DEBUG nova.network.neutron [req-c4e464f9-4d8d-4cb7-bfdc-8477a8e48aa5 req-3234f97a-cfe4-40a0-b58b-70931d19a165 service nova] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1342.040730] env[69992]: DEBUG nova.scheduler.client.report [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1342.455187] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897877, 'name': Destroy_Task, 'duration_secs': 0.383343} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.455187] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Destroyed the VM [ 1342.455187] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Deleting Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1342.455485] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-92455d7e-aa43-45b0-ae8e-30221a44d185 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.465111] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1342.465111] env[69992]: value = "task-2897878" [ 1342.465111] env[69992]: _type = "Task" [ 1342.465111] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.475057] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897878, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.532373] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b11b4ad9-3476-4b99-b855-ebc3c424e050 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "cc8a809a-1a3b-4dad-a74b-d2f8d267b476" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.257s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1342.535218] env[69992]: DEBUG oslo_concurrency.lockutils [req-c4e464f9-4d8d-4cb7-bfdc-8477a8e48aa5 req-3234f97a-cfe4-40a0-b58b-70931d19a165 service nova] Releasing lock "refresh_cache-3f44442d-82b1-4669-8d65-0088d4a9babb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1342.535722] env[69992]: DEBUG oslo_concurrency.lockutils [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquired lock "refresh_cache-3f44442d-82b1-4669-8d65-0088d4a9babb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1342.535947] env[69992]: DEBUG nova.network.neutron [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1342.548942] env[69992]: DEBUG oslo_concurrency.lockutils [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.942s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1342.551888] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.659s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1342.552163] env[69992]: DEBUG nova.objects.instance [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lazy-loading 'resources' on Instance uuid 45a00234-7ebf-4835-bad3-30474bb27148 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1342.573604] env[69992]: INFO nova.scheduler.client.report [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Deleted allocations for instance 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9 [ 1342.627522] env[69992]: DEBUG nova.compute.manager [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1342.655084] env[69992]: DEBUG nova.virt.hardware [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1342.656028] env[69992]: DEBUG nova.virt.hardware [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1342.656028] env[69992]: DEBUG nova.virt.hardware [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1342.656028] env[69992]: DEBUG nova.virt.hardware [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1342.656028] env[69992]: DEBUG nova.virt.hardware [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1342.656238] env[69992]: DEBUG nova.virt.hardware [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1342.656414] env[69992]: DEBUG nova.virt.hardware [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1342.656607] env[69992]: DEBUG nova.virt.hardware [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1342.656807] 
env[69992]: DEBUG nova.virt.hardware [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1342.656980] env[69992]: DEBUG nova.virt.hardware [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1342.657181] env[69992]: DEBUG nova.virt.hardware [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1342.658355] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fbebb3a-4daf-47d9-9b07-7f7e57ac3476 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.666382] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45d99501-a3ec-45ec-b812-7e37dacddb84 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.850363] env[69992]: DEBUG nova.compute.manager [req-5842fee9-c070-4acf-9d4a-42d7e47f0f14 req-3570aa79-8139-40d2-ae33-f4e6065f7895 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Received event network-vif-plugged-c35bf17a-173c-4013-b8e4-85b2415e8860 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1342.850664] env[69992]: DEBUG oslo_concurrency.lockutils [req-5842fee9-c070-4acf-9d4a-42d7e47f0f14 req-3570aa79-8139-40d2-ae33-f4e6065f7895 service nova] Acquiring lock "fe3624b0-7d4a-4a16-83e3-3f28c2a74006-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1342.850914] env[69992]: DEBUG oslo_concurrency.lockutils [req-5842fee9-c070-4acf-9d4a-42d7e47f0f14 req-3570aa79-8139-40d2-ae33-f4e6065f7895 service nova] Lock "fe3624b0-7d4a-4a16-83e3-3f28c2a74006-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1342.851132] env[69992]: DEBUG oslo_concurrency.lockutils [req-5842fee9-c070-4acf-9d4a-42d7e47f0f14 req-3570aa79-8139-40d2-ae33-f4e6065f7895 service nova] Lock "fe3624b0-7d4a-4a16-83e3-3f28c2a74006-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1342.851471] env[69992]: DEBUG nova.compute.manager [req-5842fee9-c070-4acf-9d4a-42d7e47f0f14 req-3570aa79-8139-40d2-ae33-f4e6065f7895 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] No waiting events found dispatching network-vif-plugged-c35bf17a-173c-4013-b8e4-85b2415e8860 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1342.851878] env[69992]: WARNING 
nova.compute.manager [req-5842fee9-c070-4acf-9d4a-42d7e47f0f14 req-3570aa79-8139-40d2-ae33-f4e6065f7895 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Received unexpected event network-vif-plugged-c35bf17a-173c-4013-b8e4-85b2415e8860 for instance with vm_state building and task_state spawning. [ 1342.930634] env[69992]: DEBUG nova.network.neutron [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Successfully updated port: c35bf17a-173c-4013-b8e4-85b2415e8860 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1342.975451] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897878, 'name': RemoveSnapshot_Task, 'duration_secs': 0.356401} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.975769] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Deleted Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1342.976083] env[69992]: DEBUG nova.compute.manager [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1342.976880] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e507f49-efae-4a2b-b923-a70549361e81 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.070872] env[69992]: DEBUG nova.network.neutron [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1343.081652] env[69992]: DEBUG oslo_concurrency.lockutils [None req-23567d34-cc1d-49df-805d-55f992622d3e tempest-AttachInterfacesV270Test-1173087170 tempest-AttachInterfacesV270Test-1173087170-project-member] Lock "1cf5a6d2-8ec9-429a-9c31-eb3c699389d9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.687s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1343.194808] env[69992]: DEBUG nova.network.neutron [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Port 2c7ae122-41e5-4605-a33e-4516dd1f5945 binding to destination host cpu-1 is already ACTIVE {{(pid=69992) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1343.207617] env[69992]: DEBUG nova.network.neutron [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Updating instance_info_cache with network_info: [{"id": "1af32739-5591-409e-8d79-66f78e068c14", "address": "fa:16:3e:9b:e1:9d", "network": {"id": "838abbcd-8525-47f9-b3e0-eb738a0cea7e", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1428842137-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53308426a9c44f46b78a155e612ee5a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1af32739-55", "ovs_interfaceid": "1af32739-5591-409e-8d79-66f78e068c14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1343.334546] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b08922-61f0-4ab3-9f6a-955419771fda {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.342480] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf554f4d-8282-48a6-9cde-12328d2a2fc2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.374524] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db38bfd9-ac4e-47f5-a955-6e9a6b58fd01 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.381907] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1f81177a-d8e0-4378-bffa-158256e077e3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.135280] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "cc8a809a-1a3b-4dad-a74b-d2f8d267b476" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1344.135693] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "cc8a809a-1a3b-4dad-a74b-d2f8d267b476" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1344.135765] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "cc8a809a-1a3b-4dad-a74b-d2f8d267b476-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1344.136067] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "cc8a809a-1a3b-4dad-a74b-d2f8d267b476-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1344.136159] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "cc8a809a-1a3b-4dad-a74b-d2f8d267b476-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1344.138021] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.138156] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1344.138307] env[69992]: DEBUG nova.network.neutron [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} 
[ 1344.149027] env[69992]: DEBUG oslo_concurrency.lockutils [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Releasing lock "refresh_cache-3f44442d-82b1-4669-8d65-0088d4a9babb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1344.149027] env[69992]: DEBUG nova.compute.manager [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Instance network_info: |[{"id": "1af32739-5591-409e-8d79-66f78e068c14", "address": "fa:16:3e:9b:e1:9d", "network": {"id": "838abbcd-8525-47f9-b3e0-eb738a0cea7e", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1428842137-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53308426a9c44f46b78a155e612ee5a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1af32739-55", "ovs_interfaceid": "1af32739-5591-409e-8d79-66f78e068c14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1344.149027] env[69992]: INFO nova.compute.manager [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Terminating instance [ 1344.149027] env[69992]: INFO nova.compute.manager [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Shelve offloading [ 1344.149334] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:e1:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1af32739-5591-409e-8d79-66f78e068c14', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1344.156265] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Creating folder: Project (53308426a9c44f46b78a155e612ee5a3). Parent ref: group-v581821. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1344.159489] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b762acea-48a2-4995-865a-72d3b3e4a6d6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.170153] env[69992]: DEBUG nova.compute.provider_tree [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1344.182336] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Created folder: Project (53308426a9c44f46b78a155e612ee5a3) in parent group-v581821. [ 1344.182336] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Creating folder: Instances. Parent ref: group-v582095. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1344.182336] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-488806f6-df39-41b6-aead-1125709e1409 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.191512] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Created folder: Instances in parent group-v582095. [ 1344.191769] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1344.192514] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1344.192514] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e799b2a-4722-4657-8158-592f5df73094 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.213883] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1344.213883] env[69992]: value = "task-2897881" [ 1344.213883] env[69992]: _type = "Task" [ 1344.213883] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.226367] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897881, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.669412] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "1f9f3bdf-c806-4ac9-85f3-6b33b983fafe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1344.669412] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "1f9f3bdf-c806-4ac9-85f3-6b33b983fafe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1344.669412] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "1f9f3bdf-c806-4ac9-85f3-6b33b983fafe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1344.670593] env[69992]: DEBUG nova.compute.manager [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1344.670593] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1344.671394] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d80efa-f17e-4812-9b65-6756d9b64c1b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.674408] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1344.675228] env[69992]: DEBUG nova.scheduler.client.report [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1344.681241] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c52c8c11-d565-41fb-b5f4-3336410a5b92 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.685998] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1344.685998] env[69992]: value = "task-2897882" [ 1344.685998] env[69992]: _type = "Task" [ 1344.685998] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.689520] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1344.692648] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-684e6c3d-e751-4b7b-8cbe-414271b23126 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.701009] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] VM already powered off {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1344.701242] env[69992]: DEBUG nova.compute.manager [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1344.701578] env[69992]: DEBUG oslo_vmware.api [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1344.701578] env[69992]: value = "task-2897883" [ 1344.701578] env[69992]: _type = "Task" [ 1344.701578] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.702319] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dfef0d4-7544-4bf3-b584-20090a26cbe8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.708679] env[69992]: DEBUG nova.network.neutron [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1344.718666] env[69992]: DEBUG oslo_vmware.api [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897883, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.718802] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "refresh_cache-08869f38-9609-4f7f-9110-2f26fd1cb3f7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.719504] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquired lock "refresh_cache-08869f38-9609-4f7f-9110-2f26fd1cb3f7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1344.719504] env[69992]: DEBUG nova.network.neutron [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1344.729058] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897881, 'name': CreateVM_Task, 'duration_secs': 0.375928} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.729775] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1344.730485] env[69992]: DEBUG oslo_concurrency.lockutils [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.730663] env[69992]: DEBUG oslo_concurrency.lockutils [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1344.732268] env[69992]: DEBUG oslo_concurrency.lockutils [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1344.732268] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1a253d0-912a-419b-bbfc-54a218ba82f2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.735920] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1344.735920] 
env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52045cff-3a73-a8e8-474f-e8d4af43019a" [ 1344.735920] env[69992]: _type = "Task" [ 1344.735920] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.748637] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52045cff-3a73-a8e8-474f-e8d4af43019a, 'name': SearchDatastore_Task, 'duration_secs': 0.009127} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.749064] env[69992]: DEBUG oslo_concurrency.lockutils [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1344.749200] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1344.749444] env[69992]: DEBUG oslo_concurrency.lockutils [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.749588] env[69992]: DEBUG oslo_concurrency.lockutils [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1344.749759] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1344.750049] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b68ee40-d6b9-4a99-977a-7c81aec47318 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.760035] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
1344.760035] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1344.760035] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a78192f1-c03b-4735-aa5c-f53a3c834023 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.763911] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1344.763911] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d0f5bf-9947-820d-9337-db4f3898bc6c" [ 1344.763911] env[69992]: _type = "Task" [ 1344.763911] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.771900] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d0f5bf-9947-820d-9337-db4f3898bc6c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.935506] env[69992]: DEBUG nova.network.neutron [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Updating instance_info_cache with network_info: [{"id": "c35bf17a-173c-4013-b8e4-85b2415e8860", "address": "fa:16:3e:e2:6a:23", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc35bf17a-17", "ovs_interfaceid": "c35bf17a-173c-4013-b8e4-85b2415e8860", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1344.956579] env[69992]: DEBUG nova.compute.manager [req-c7cf183b-68c5-40da-9ba9-436ca109df2b req-1c94d7a3-12dd-4c7e-b895-9554c3976b0a service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Received event network-changed-c35bf17a-173c-4013-b8e4-85b2415e8860 {{(pid=69992) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11737}} [ 1344.956709] env[69992]: DEBUG nova.compute.manager [req-c7cf183b-68c5-40da-9ba9-436ca109df2b req-1c94d7a3-12dd-4c7e-b895-9554c3976b0a service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Refreshing instance network info cache due to event network-changed-c35bf17a-173c-4013-b8e4-85b2415e8860. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1344.956900] env[69992]: DEBUG oslo_concurrency.lockutils [req-c7cf183b-68c5-40da-9ba9-436ca109df2b req-1c94d7a3-12dd-4c7e-b895-9554c3976b0a service nova] Acquiring lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.182860] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.631s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1345.185406] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.934s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1345.187035] env[69992]: INFO nova.compute.claims [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1345.218653] env[69992]: DEBUG oslo_vmware.api [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897883, 'name': PowerOffVM_Task, 'duration_secs': 0.192655} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.219116] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1345.219289] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1345.220440] env[69992]: INFO nova.scheduler.client.report [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Deleted allocations for instance 45a00234-7ebf-4835-bad3-30474bb27148 [ 1345.221445] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-19f87013-3313-4763-88c2-73870e6dbbd7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.276579] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d0f5bf-9947-820d-9337-db4f3898bc6c, 'name': SearchDatastore_Task, 'duration_secs': 0.008107} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.277413] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8acb9055-e2f7-4aa8-8010-d07758328791 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.288059] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1345.288059] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528d3586-3d28-415b-1847-033d3f780f62" [ 1345.288059] env[69992]: _type = "Task" [ 1345.288059] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.292740] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528d3586-3d28-415b-1847-033d3f780f62, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.295311] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1345.295522] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1345.295689] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Deleting the datastore file [datastore1] cc8a809a-1a3b-4dad-a74b-d2f8d267b476 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1345.295943] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8815bff7-1553-4aed-ada0-a93fd2f45371 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.302884] env[69992]: DEBUG oslo_vmware.api [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1345.302884] env[69992]: value = "task-2897885" [ 1345.302884] env[69992]: _type = "Task" [ 1345.302884] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.310558] env[69992]: DEBUG oslo_vmware.api [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897885, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.438866] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1345.439336] env[69992]: DEBUG nova.compute.manager [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Instance network_info: |[{"id": "c35bf17a-173c-4013-b8e4-85b2415e8860", "address": "fa:16:3e:e2:6a:23", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc35bf17a-17", "ovs_interfaceid": "c35bf17a-173c-4013-b8e4-85b2415e8860", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1345.439694] env[69992]: DEBUG oslo_concurrency.lockutils [req-c7cf183b-68c5-40da-9ba9-436ca109df2b req-1c94d7a3-12dd-4c7e-b895-9554c3976b0a service nova] Acquired lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1345.439898] env[69992]: DEBUG nova.network.neutron [req-c7cf183b-68c5-40da-9ba9-436ca109df2b req-1c94d7a3-12dd-4c7e-b895-9554c3976b0a service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Refreshing network info cache for port c35bf17a-173c-4013-b8e4-85b2415e8860 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1345.441279] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:6a:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ead20342-9afa-435e-a22b-b4a903457712', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c35bf17a-173c-4013-b8e4-85b2415e8860', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1345.449403] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 
tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1345.451796] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1345.452629] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a64d2a9e-0673-4e6f-aa20-27fba165ce05 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.472893] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1345.472893] env[69992]: value = "task-2897886" [ 1345.472893] env[69992]: _type = "Task" [ 1345.472893] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.480795] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897886, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.498268] env[69992]: DEBUG nova.network.neutron [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Updating instance_info_cache with network_info: [{"id": "bb64cf0b-3b8e-4225-ba71-1524625e60a7", "address": "fa:16:3e:e0:df:48", "network": {"id": "50514bed-dff8-45a5-83f3-ec0c1ece9611", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1240365716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f3a2959667e41f1b5868994454b21be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb64cf0b-3b", "ovs_interfaceid": "bb64cf0b-3b8e-4225-ba71-1524625e60a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.702844] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "refresh_cache-1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.703091] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired lock 
"refresh_cache-1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1345.703308] env[69992]: DEBUG nova.network.neutron [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1345.733689] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1ff62a20-a044-4f68-986d-b50aff537b75 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "45a00234-7ebf-4835-bad3-30474bb27148" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.373s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1345.793622] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528d3586-3d28-415b-1847-033d3f780f62, 'name': SearchDatastore_Task, 'duration_secs': 0.010036} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.793937] env[69992]: DEBUG oslo_concurrency.lockutils [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1345.794282] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 3f44442d-82b1-4669-8d65-0088d4a9babb/3f44442d-82b1-4669-8d65-0088d4a9babb.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1345.794584] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-34579bf2-8e85-4370-bd90-1f8cdc4d8528 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.801354] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1345.801354] env[69992]: value = "task-2897887" [ 1345.801354] env[69992]: _type = "Task" [ 1345.801354] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.812341] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897887, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.815649] env[69992]: DEBUG oslo_vmware.api [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897885, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143447} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.815925] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1345.816152] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1345.816333] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1345.816507] env[69992]: INFO nova.compute.manager [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1345.816744] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1345.817283] env[69992]: DEBUG nova.compute.manager [-] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1345.817416] env[69992]: DEBUG nova.network.neutron [-] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1345.986470] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897886, 'name': CreateVM_Task, 'duration_secs': 0.472859} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.986644] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1345.987334] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.987500] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1345.987827] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1345.988142] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76cca0b8-8fac-4f82-aaab-c3203efcd6f8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.994255] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1345.994255] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528ca051-4892-321e-4193-73ecc7236a49" [ 1345.994255] env[69992]: _type = "Task" [ 1345.994255] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.003254] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Releasing lock "refresh_cache-08869f38-9609-4f7f-9110-2f26fd1cb3f7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1346.004695] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528ca051-4892-321e-4193-73ecc7236a49, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.254512] env[69992]: DEBUG nova.network.neutron [req-c7cf183b-68c5-40da-9ba9-436ca109df2b req-1c94d7a3-12dd-4c7e-b895-9554c3976b0a service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Updated VIF entry in instance network info cache for port c35bf17a-173c-4013-b8e4-85b2415e8860. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1346.254512] env[69992]: DEBUG nova.network.neutron [req-c7cf183b-68c5-40da-9ba9-436ca109df2b req-1c94d7a3-12dd-4c7e-b895-9554c3976b0a service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Updating instance_info_cache with network_info: [{"id": "c35bf17a-173c-4013-b8e4-85b2415e8860", "address": "fa:16:3e:e2:6a:23", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc35bf17a-17", "ovs_interfaceid": "c35bf17a-173c-4013-b8e4-85b2415e8860", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1346.324015] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897887, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.506935] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528ca051-4892-321e-4193-73ecc7236a49, 'name': SearchDatastore_Task, 'duration_secs': 0.012153} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.507308] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1346.507827] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1346.511433] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.511433] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1346.511548] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1346.514036] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1925e123-12af-4a2f-91ca-8ec23673edca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.521967] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1346.522211] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1346.522987] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-698e68ec-6241-4d98-95ce-8c27e94c0eef {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.529162] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1346.529162] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]524df8a5-8611-2b2b-eeb7-0d29fb56c717" [ 1346.529162] env[69992]: _type = "Task" [ 1346.529162] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.535449] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5622df8a-7346-48bb-a47b-11b32cf31048 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.543616] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524df8a5-8611-2b2b-eeb7-0d29fb56c717, 'name': SearchDatastore_Task, 'duration_secs': 0.010945} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.545701] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f54cabe-3ed8-425f-8fda-f40601c97c3c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.551018] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8800a4-43ed-4752-a86b-bb4df31d2e64 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.554907] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1346.554907] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52c82b35-708c-25d0-fcdd-54ace8b0f1dc" [ 1346.554907] env[69992]: _type = "Task" [ 1346.554907] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.588549] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801110ef-5a3f-4aa7-a567-9d90ff652d7e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.594713] env[69992]: DEBUG nova.network.neutron [-] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1346.601854] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c82b35-708c-25d0-fcdd-54ace8b0f1dc, 'name': SearchDatastore_Task, 'duration_secs': 0.02637} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.602318] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1346.602585] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] fe3624b0-7d4a-4a16-83e3-3f28c2a74006/fe3624b0-7d4a-4a16-83e3-3f28c2a74006.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1346.603790] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f6a839-053f-4d06-8c96-0c15ac6151c4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.607605] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ff185e27-a465-4913-bb45-b39862b599b8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.620375] env[69992]: DEBUG nova.compute.provider_tree [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1346.623656] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1346.623656] env[69992]: value = "task-2897888" [ 1346.623656] env[69992]: _type = "Task" [ 1346.623656] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.635355] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897888, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.729771] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1346.730710] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9ff372-b038-473d-bff9-b97a56fc0a67 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.738402] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1346.738629] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1b3725b-dddb-4060-96b5-7adcc2055cde {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.758711] env[69992]: DEBUG nova.network.neutron [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Updating instance_info_cache with network_info: [{"id": "2c7ae122-41e5-4605-a33e-4516dd1f5945", "address": "fa:16:3e:41:be:e4", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c7ae122-41", "ovs_interfaceid": "2c7ae122-41e5-4605-a33e-4516dd1f5945", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1346.760407] env[69992]: DEBUG oslo_concurrency.lockutils [req-c7cf183b-68c5-40da-9ba9-436ca109df2b req-1c94d7a3-12dd-4c7e-b895-9554c3976b0a service nova] Releasing 
lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1346.803716] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1346.803948] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1346.804146] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Deleting the datastore file [datastore1] 08869f38-9609-4f7f-9110-2f26fd1cb3f7 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1346.804461] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d3c0b45-3fa0-4426-b676-1a883b78206e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.814289] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897887, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.815447] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1346.815447] env[69992]: value = "task-2897890" [ 1346.815447] env[69992]: _type = "Task" [ 1346.815447] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.822559] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897890, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.986364] env[69992]: DEBUG nova.compute.manager [req-e887b1b5-ede0-4191-b19d-2cabfeef1278 req-0b96b4aa-a3a7-4ab2-b150-baad261e7793 service nova] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Received event network-vif-deleted-f0f66e20-bfab-46ab-a70b-a4a982f63954 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1346.986364] env[69992]: DEBUG nova.compute.manager [req-e887b1b5-ede0-4191-b19d-2cabfeef1278 req-0b96b4aa-a3a7-4ab2-b150-baad261e7793 service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Received event network-vif-unplugged-bb64cf0b-3b8e-4225-ba71-1524625e60a7 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1346.986555] env[69992]: DEBUG oslo_concurrency.lockutils [req-e887b1b5-ede0-4191-b19d-2cabfeef1278 req-0b96b4aa-a3a7-4ab2-b150-baad261e7793 service nova] Acquiring lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1346.986854] env[69992]: DEBUG oslo_concurrency.lockutils [req-e887b1b5-ede0-4191-b19d-2cabfeef1278 req-0b96b4aa-a3a7-4ab2-b150-baad261e7793 service nova] Lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1346.987123] env[69992]: DEBUG oslo_concurrency.lockutils [req-e887b1b5-ede0-4191-b19d-2cabfeef1278 req-0b96b4aa-a3a7-4ab2-b150-baad261e7793 service nova] Lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1346.987365] env[69992]: DEBUG nova.compute.manager [req-e887b1b5-ede0-4191-b19d-2cabfeef1278 req-0b96b4aa-a3a7-4ab2-b150-baad261e7793 service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] No waiting events found dispatching network-vif-unplugged-bb64cf0b-3b8e-4225-ba71-1524625e60a7 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1346.987598] env[69992]: WARNING nova.compute.manager [req-e887b1b5-ede0-4191-b19d-2cabfeef1278 req-0b96b4aa-a3a7-4ab2-b150-baad261e7793 service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Received unexpected event network-vif-unplugged-bb64cf0b-3b8e-4225-ba71-1524625e60a7 for instance with vm_state shelved and task_state shelving_offloading. [ 1346.987822] env[69992]: DEBUG nova.compute.manager [req-e887b1b5-ede0-4191-b19d-2cabfeef1278 req-0b96b4aa-a3a7-4ab2-b150-baad261e7793 service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Received event network-changed-bb64cf0b-3b8e-4225-ba71-1524625e60a7 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1346.988080] env[69992]: DEBUG nova.compute.manager [req-e887b1b5-ede0-4191-b19d-2cabfeef1278 req-0b96b4aa-a3a7-4ab2-b150-baad261e7793 service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Refreshing instance network info cache due to event network-changed-bb64cf0b-3b8e-4225-ba71-1524625e60a7. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1346.988362] env[69992]: DEBUG oslo_concurrency.lockutils [req-e887b1b5-ede0-4191-b19d-2cabfeef1278 req-0b96b4aa-a3a7-4ab2-b150-baad261e7793 service nova] Acquiring lock "refresh_cache-08869f38-9609-4f7f-9110-2f26fd1cb3f7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.988580] env[69992]: DEBUG oslo_concurrency.lockutils [req-e887b1b5-ede0-4191-b19d-2cabfeef1278 req-0b96b4aa-a3a7-4ab2-b150-baad261e7793 service nova] Acquired lock "refresh_cache-08869f38-9609-4f7f-9110-2f26fd1cb3f7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1346.988944] env[69992]: DEBUG nova.network.neutron [req-e887b1b5-ede0-4191-b19d-2cabfeef1278 req-0b96b4aa-a3a7-4ab2-b150-baad261e7793 service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Refreshing network info cache for port bb64cf0b-3b8e-4225-ba71-1524625e60a7 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1347.025734] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "bf45e20c-0fd7-4a27-924c-0ae56c6cff82" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1347.025997] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "bf45e20c-0fd7-4a27-924c-0ae56c6cff82" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1347.103276] env[69992]: INFO nova.compute.manager [-] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Took 1.29 seconds to deallocate network for instance. [ 1347.126694] env[69992]: DEBUG nova.scheduler.client.report [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1347.140273] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897888, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.261822] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Releasing lock "refresh_cache-1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1347.314401] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897887, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.326353] env[69992]: DEBUG oslo_vmware.api [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897890, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146861} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.326628] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1347.326788] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1347.326995] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1347.349163] env[69992]: INFO nova.scheduler.client.report [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Deleted allocations for instance 08869f38-9609-4f7f-9110-2f26fd1cb3f7 [ 1347.531839] env[69992]: DEBUG nova.compute.manager [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1347.609492] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1347.637567] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.452s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1347.638128] env[69992]: DEBUG nova.compute.manager [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1347.641684] env[69992]: DEBUG oslo_concurrency.lockutils [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.470s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1347.642560] env[69992]: INFO nova.compute.claims [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1347.650717] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897888, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.729313] env[69992]: DEBUG nova.network.neutron [req-e887b1b5-ede0-4191-b19d-2cabfeef1278 req-0b96b4aa-a3a7-4ab2-b150-baad261e7793 service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Updated VIF entry in instance network info cache for port bb64cf0b-3b8e-4225-ba71-1524625e60a7. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1347.729818] env[69992]: DEBUG nova.network.neutron [req-e887b1b5-ede0-4191-b19d-2cabfeef1278 req-0b96b4aa-a3a7-4ab2-b150-baad261e7793 service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Updating instance_info_cache with network_info: [{"id": "bb64cf0b-3b8e-4225-ba71-1524625e60a7", "address": "fa:16:3e:e0:df:48", "network": {"id": "50514bed-dff8-45a5-83f3-ec0c1ece9611", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1240365716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f3a2959667e41f1b5868994454b21be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapbb64cf0b-3b", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1347.771736] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2973b54-3324-47b2-b34e-f3931ed4bc03 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.778809] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f773d52-8ac5-44e3-962a-72b015ac56d2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.814393] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897887, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.676924} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.814672] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 3f44442d-82b1-4669-8d65-0088d4a9babb/3f44442d-82b1-4669-8d65-0088d4a9babb.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1347.814894] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1347.815164] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7bb371f9-ef84-430a-b6a5-020ee69c3b7c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.823256] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1347.823256] env[69992]: value = "task-2897891" [ 1347.823256] env[69992]: _type = "Task" [ 1347.823256] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.832226] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897891, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.855668] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1348.051226] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1348.143288] env[69992]: DEBUG nova.compute.utils [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1348.144491] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897888, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.144599] env[69992]: DEBUG nova.compute.manager [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1348.144973] env[69992]: DEBUG nova.network.neutron [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1348.183789] env[69992]: DEBUG nova.policy [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '044902c6075d41739188628ba5ebd58d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b51b8195c4e7418cbdaa66aa5e5aff5b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1348.232813] env[69992]: DEBUG oslo_concurrency.lockutils [req-e887b1b5-ede0-4191-b19d-2cabfeef1278 req-0b96b4aa-a3a7-4ab2-b150-baad261e7793 service nova] Releasing lock "refresh_cache-08869f38-9609-4f7f-9110-2f26fd1cb3f7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1348.333514] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897891, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061017} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.333864] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1348.334501] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7742a425-9ad9-41b4-a464-2966f94a6d30 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.356513] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] 3f44442d-82b1-4669-8d65-0088d4a9babb/3f44442d-82b1-4669-8d65-0088d4a9babb.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1348.359683] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6de30f5-5bf3-4699-ad98-e065992f3391 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.378716] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1348.378716] env[69992]: value = "task-2897892" [ 1348.378716] env[69992]: _type = "Task" [ 1348.378716] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.387266] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897892, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.444322] env[69992]: DEBUG nova.network.neutron [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Successfully created port: 5b1da692-8443-4514-a168-fc8c34ae5b4a {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1348.643843] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897888, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.87342} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.644168] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] fe3624b0-7d4a-4a16-83e3-3f28c2a74006/fe3624b0-7d4a-4a16-83e3-3f28c2a74006.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1348.644400] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1348.644924] env[69992]: DEBUG nova.compute.manager [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1348.648152] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c15286b5-b798-4639-98f6-b22046b92acf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.658463] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1348.658463] env[69992]: value = "task-2897893" [ 1348.658463] env[69992]: _type = "Task" [ 1348.658463] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.669676] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897893, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.890350] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897892, 'name': ReconfigVM_Task, 'duration_secs': 0.366656} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.890645] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Reconfigured VM instance instance-00000064 to attach disk [datastore2] 3f44442d-82b1-4669-8d65-0088d4a9babb/3f44442d-82b1-4669-8d65-0088d4a9babb.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1348.891314] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-de666133-dfe7-4e6e-9fbe-657e3b05adca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.898582] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1348.898582] env[69992]: value = "task-2897894" [ 1348.898582] env[69992]: _type = "Task" [ 1348.898582] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.909826] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897894, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.911458] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8dba82-5ac2-4cf0-b7b2-5764afae5207 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.918172] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2279f10e-f070-49a3-862d-2c60bbaec4a8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.922363] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adc5b6c7-eb0b-45cb-9f41-44bfbea0d92e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.969008] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecccc7b8-ac02-48e7-bcdd-075db6027566 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.972943] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a02104-ee5d-48f3-b728-b5d8beb9d647 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.980085] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Updating instance '1f9f3bdf-c806-4ac9-85f3-6b33b983fafe' progress to 83 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} 
[ 1348.987330] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91010eb7-49b1-4dfd-8533-96daa781aec3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.001547] env[69992]: DEBUG nova.compute.provider_tree [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1349.167867] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897893, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.286307} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.168107] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1349.168789] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ce2f38-e09c-4efe-9bbb-eeacf66ebc36 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.191857] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] fe3624b0-7d4a-4a16-83e3-3f28c2a74006/fe3624b0-7d4a-4a16-83e3-3f28c2a74006.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1349.192815] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6693cfc6-a71b-4bfe-be3b-a6c24b51987d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.212320] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1349.212320] env[69992]: value = "task-2897895" [ 1349.212320] env[69992]: _type = "Task" [ 1349.212320] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.222410] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897895, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.409152] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897894, 'name': Rename_Task, 'duration_secs': 0.144349} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.409501] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1349.409660] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4c5bce08-cef1-4c63-9a02-65a3d3d8c94c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.416208] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1349.416208] env[69992]: value = "task-2897896" [ 1349.416208] env[69992]: _type = "Task" [ 1349.416208] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.423891] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897896, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.488578] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1349.488904] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-491606fb-5214-444f-9b52-01698a187c2d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.495728] env[69992]: DEBUG oslo_vmware.api [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1349.495728] env[69992]: value = "task-2897897" [ 1349.495728] env[69992]: _type = "Task" [ 1349.495728] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.504298] env[69992]: DEBUG oslo_vmware.api [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897897, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.505141] env[69992]: DEBUG nova.scheduler.client.report [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1349.660875] env[69992]: DEBUG nova.compute.manager [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1349.683008] env[69992]: DEBUG nova.virt.hardware [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1349.683292] env[69992]: DEBUG nova.virt.hardware [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1349.683473] env[69992]: DEBUG nova.virt.hardware [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1349.683761] env[69992]: DEBUG nova.virt.hardware [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1349.683964] env[69992]: DEBUG nova.virt.hardware [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} 
[ 1349.684142] env[69992]: DEBUG nova.virt.hardware [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1349.684378] env[69992]: DEBUG nova.virt.hardware [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1349.684558] env[69992]: DEBUG nova.virt.hardware [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1349.684756] env[69992]: DEBUG nova.virt.hardware [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1349.684931] env[69992]: DEBUG nova.virt.hardware [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1349.685152] env[69992]: DEBUG nova.virt.hardware [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1349.686781] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed2f5b05-d864-4a1e-96a1-230b68a2757b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.695256] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-378d8def-88e2-415f-af65-d637cf275090 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.725458] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897895, 'name': ReconfigVM_Task, 'duration_secs': 0.247212} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.725958] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Reconfigured VM instance instance-00000065 to attach disk [datastore2] fe3624b0-7d4a-4a16-83e3-3f28c2a74006/fe3624b0-7d4a-4a16-83e3-3f28c2a74006.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1349.726960] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c3571e10-c957-4455-9656-beb2ac62325b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.734262] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1349.734262] env[69992]: value = "task-2897898" [ 1349.734262] env[69992]: _type = "Task" [ 1349.734262] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.742627] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897898, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.869968] env[69992]: DEBUG nova.compute.manager [req-906a0d95-230d-4df3-8fa2-5e89b15af4db req-81c1efb5-21da-4771-871a-982048dccb6d service nova] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Received event network-vif-plugged-5b1da692-8443-4514-a168-fc8c34ae5b4a {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1349.870218] env[69992]: DEBUG oslo_concurrency.lockutils [req-906a0d95-230d-4df3-8fa2-5e89b15af4db req-81c1efb5-21da-4771-871a-982048dccb6d service nova] Acquiring lock "91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1349.870394] env[69992]: DEBUG oslo_concurrency.lockutils [req-906a0d95-230d-4df3-8fa2-5e89b15af4db req-81c1efb5-21da-4771-871a-982048dccb6d service nova] Lock "91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1349.870565] env[69992]: DEBUG oslo_concurrency.lockutils [req-906a0d95-230d-4df3-8fa2-5e89b15af4db req-81c1efb5-21da-4771-871a-982048dccb6d service nova] Lock "91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1349.870756] env[69992]: DEBUG nova.compute.manager [req-906a0d95-230d-4df3-8fa2-5e89b15af4db req-81c1efb5-21da-4771-871a-982048dccb6d service nova] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] No waiting events found dispatching 
network-vif-plugged-5b1da692-8443-4514-a168-fc8c34ae5b4a {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1349.870945] env[69992]: WARNING nova.compute.manager [req-906a0d95-230d-4df3-8fa2-5e89b15af4db req-81c1efb5-21da-4771-871a-982048dccb6d service nova] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Received unexpected event network-vif-plugged-5b1da692-8443-4514-a168-fc8c34ae5b4a for instance with vm_state building and task_state spawning. [ 1349.926170] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897896, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.953704] env[69992]: DEBUG nova.network.neutron [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Successfully updated port: 5b1da692-8443-4514-a168-fc8c34ae5b4a {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1350.011331] env[69992]: DEBUG oslo_concurrency.lockutils [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.370s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1350.012108] env[69992]: DEBUG nova.compute.manager [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1350.015938] env[69992]: DEBUG oslo_vmware.api [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897897, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.016590] env[69992]: DEBUG oslo_concurrency.lockutils [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.844s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1350.016859] env[69992]: DEBUG oslo_concurrency.lockutils [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1350.019690] env[69992]: DEBUG oslo_concurrency.lockutils [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.791s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1350.019993] env[69992]: DEBUG nova.objects.instance [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lazy-loading 'resources' on Instance uuid 48558980-2800-4f5b-80ce-d59552445c3f {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1350.042753] env[69992]: INFO nova.scheduler.client.report [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Deleted allocations for instance 31109fbd-ebc0-422d-a705-7d0e59d4bbb4 [ 1350.244192] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897898, 'name': Rename_Task, 'duration_secs': 0.245001} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.244522] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1350.244800] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f380b06-5c69-4861-9b8d-ec824a7bb225 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.252028] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1350.252028] env[69992]: value = "task-2897899" [ 1350.252028] env[69992]: _type = "Task" [ 1350.252028] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.259763] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897899, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.426340] env[69992]: DEBUG oslo_vmware.api [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897896, 'name': PowerOnVM_Task, 'duration_secs': 0.591023} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.426682] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1350.426794] env[69992]: INFO nova.compute.manager [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Took 10.22 seconds to spawn the instance on the hypervisor. [ 1350.426972] env[69992]: DEBUG nova.compute.manager [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1350.427722] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc848bd-2cb8-4099-b50d-9ac6dc071c57 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.456502] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "refresh_cache-91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.456725] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "refresh_cache-91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1350.456822] env[69992]: DEBUG nova.network.neutron [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1350.506262] env[69992]: DEBUG oslo_vmware.api [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 
tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897897, 'name': PowerOnVM_Task, 'duration_secs': 0.516977} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.506514] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1350.506698] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-2f17ce8f-2f9f-4807-8a9d-4a31025adb24 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Updating instance '1f9f3bdf-c806-4ac9-85f3-6b33b983fafe' progress to 100 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1350.518052] env[69992]: DEBUG nova.compute.utils [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1350.518755] env[69992]: DEBUG nova.compute.manager [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1350.518927] env[69992]: DEBUG nova.network.neutron [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1350.552978] env[69992]: DEBUG oslo_concurrency.lockutils [None req-edf29253-52b6-46f5-9832-28b4cbb7e264 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "31109fbd-ebc0-422d-a705-7d0e59d4bbb4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.949s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1350.564024] env[69992]: DEBUG nova.policy [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94f19c179a3545089bcc66b7e5dc36e5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4da04b8933ad4d2ba4b1c193853f31b2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1350.573405] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock 
"08869f38-9609-4f7f-9110-2f26fd1cb3f7" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1350.765080] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897899, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.771017] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7064becb-7c3a-42fe-bef2-61f20d1628cf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.775382] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792e2a6c-e7a5-4dd9-b87e-dd499eaa974c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.808640] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b783040-582e-44f1-ab8d-4fc6629c3195 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.816321] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d467448e-8051-4ba8-b8fa-e6fd83f58604 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.830031] env[69992]: DEBUG nova.compute.provider_tree [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1350.838384] env[69992]: DEBUG nova.network.neutron [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Successfully created port: c41aefad-ecba-4fa1-ae2e-2586734ffa8a {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1350.949659] env[69992]: INFO nova.compute.manager [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Took 27.97 seconds to build instance. [ 1351.004993] env[69992]: DEBUG nova.network.neutron [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1351.022913] env[69992]: DEBUG nova.compute.manager [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1351.190132] env[69992]: DEBUG nova.network.neutron [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Updating instance_info_cache with network_info: [{"id": "5b1da692-8443-4514-a168-fc8c34ae5b4a", "address": "fa:16:3e:a2:1a:4a", "network": {"id": "0655b1a1-e1ee-4efd-889c-0f72915310f5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1445509008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51b8195c4e7418cbdaa66aa5e5aff5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b1da692-84", "ovs_interfaceid": "5b1da692-8443-4514-a168-fc8c34ae5b4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1351.263037] env[69992]: DEBUG oslo_vmware.api [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897899, 'name': PowerOnVM_Task, 'duration_secs': 0.91415} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.263247] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1351.263563] env[69992]: INFO nova.compute.manager [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Took 8.64 seconds to spawn the instance on the hypervisor. 
[ 1351.263696] env[69992]: DEBUG nova.compute.manager [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1351.264838] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a94c653d-94ba-4a47-b8e7-65d817f3bd77 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.334275] env[69992]: DEBUG nova.scheduler.client.report [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1351.452121] env[69992]: DEBUG oslo_concurrency.lockutils [None req-47e0d276-769b-499e-b723-002004e2921d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "3f44442d-82b1-4669-8d65-0088d4a9babb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.489s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1351.692683] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "refresh_cache-91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1351.693146] env[69992]: DEBUG nova.compute.manager [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Instance network_info: |[{"id": "5b1da692-8443-4514-a168-fc8c34ae5b4a", "address": "fa:16:3e:a2:1a:4a", "network": {"id": "0655b1a1-e1ee-4efd-889c-0f72915310f5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1445509008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51b8195c4e7418cbdaa66aa5e5aff5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b1da692-84", "ovs_interfaceid": 
"5b1da692-8443-4514-a168-fc8c34ae5b4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1351.693956] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:1a:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5b1da692-8443-4514-a168-fc8c34ae5b4a', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1351.701732] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1351.701985] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1351.702237] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a859d2eb-4683-478f-89e2-2a5a0456f948 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.723678] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1351.723678] env[69992]: value = "task-2897900" [ 1351.723678] env[69992]: _type = "Task" [ 1351.723678] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.731121] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897900, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.782947] env[69992]: INFO nova.compute.manager [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Took 28.21 seconds to build instance. 
[ 1351.841800] env[69992]: DEBUG oslo_concurrency.lockutils [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.822s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1351.845026] env[69992]: DEBUG oslo_concurrency.lockutils [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.686s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1351.845136] env[69992]: DEBUG nova.objects.instance [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lazy-loading 'resources' on Instance uuid 2b89e218-81cc-49fc-a80a-35dde48bdd5d {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1351.876235] env[69992]: INFO nova.scheduler.client.report [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Deleted allocations for instance 48558980-2800-4f5b-80ce-d59552445c3f [ 1351.935102] env[69992]: DEBUG nova.compute.manager [req-30f52f3a-9dd3-41f2-839d-19a04ea9d045 req-414b196f-516d-4be0-99ab-b3a70bf643a5 service nova] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Received event network-changed-5b1da692-8443-4514-a168-fc8c34ae5b4a {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1351.935102] env[69992]: DEBUG nova.compute.manager [req-30f52f3a-9dd3-41f2-839d-19a04ea9d045 req-414b196f-516d-4be0-99ab-b3a70bf643a5 service nova] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Refreshing instance network info cache due to event network-changed-5b1da692-8443-4514-a168-fc8c34ae5b4a. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1351.935102] env[69992]: DEBUG oslo_concurrency.lockutils [req-30f52f3a-9dd3-41f2-839d-19a04ea9d045 req-414b196f-516d-4be0-99ab-b3a70bf643a5 service nova] Acquiring lock "refresh_cache-91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1351.935102] env[69992]: DEBUG oslo_concurrency.lockutils [req-30f52f3a-9dd3-41f2-839d-19a04ea9d045 req-414b196f-516d-4be0-99ab-b3a70bf643a5 service nova] Acquired lock "refresh_cache-91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1351.935102] env[69992]: DEBUG nova.network.neutron [req-30f52f3a-9dd3-41f2-839d-19a04ea9d045 req-414b196f-516d-4be0-99ab-b3a70bf643a5 service nova] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Refreshing network info cache for port 5b1da692-8443-4514-a168-fc8c34ae5b4a {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1352.036537] env[69992]: DEBUG nova.compute.manager [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1352.063429] env[69992]: DEBUG nova.virt.hardware [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1352.063672] env[69992]: DEBUG nova.virt.hardware [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1352.063832] env[69992]: DEBUG nova.virt.hardware [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1352.064022] env[69992]: DEBUG nova.virt.hardware [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1352.064175] env[69992]: DEBUG nova.virt.hardware [None 
req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1352.064324] env[69992]: DEBUG nova.virt.hardware [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1352.064531] env[69992]: DEBUG nova.virt.hardware [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1352.064696] env[69992]: DEBUG nova.virt.hardware [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1352.064862] env[69992]: DEBUG nova.virt.hardware [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1352.065036] env[69992]: DEBUG nova.virt.hardware [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1352.065260] env[69992]: DEBUG nova.virt.hardware [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1352.072588] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-154ef0da-a9ab-4f2a-b7f2-148b249e2748 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.084681] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8325bff2-646e-40ad-8e40-47605dbff5b0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.235983] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897900, 'name': CreateVM_Task, 'duration_secs': 0.347327} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.235983] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1352.236412] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1352.236582] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1352.236913] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1352.237190] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1fa9fd8-8971-46c9-885c-1808eb70d6f7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.244781] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1352.244781] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52999de2-3230-3601-7765-1e53eda0d15a" [ 1352.244781] env[69992]: _type = "Task" [ 1352.244781] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.258966] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52999de2-3230-3601-7765-1e53eda0d15a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.285964] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec747ec6-d1d1-4581-b831-e86fc95d9df7 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "fe3624b0-7d4a-4a16-83e3-3f28c2a74006" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.721s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1352.385256] env[69992]: DEBUG oslo_concurrency.lockutils [None req-daf04fec-6db2-47b6-9ad3-00c2a9f1c52e tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "48558980-2800-4f5b-80ce-d59552445c3f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.578s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1352.497272] env[69992]: DEBUG nova.network.neutron [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Successfully updated port: c41aefad-ecba-4fa1-ae2e-2586734ffa8a {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1352.666585] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44488126-532d-463c-ae61-6647b1bba668 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.674718] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3491be13-aaba-44c4-8abb-bd93698782ab {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.709103] env[69992]: DEBUG nova.network.neutron [req-30f52f3a-9dd3-41f2-839d-19a04ea9d045 req-414b196f-516d-4be0-99ab-b3a70bf643a5 service nova] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Updated VIF entry in instance network info cache for port 5b1da692-8443-4514-a168-fc8c34ae5b4a. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1352.709500] env[69992]: DEBUG nova.network.neutron [req-30f52f3a-9dd3-41f2-839d-19a04ea9d045 req-414b196f-516d-4be0-99ab-b3a70bf643a5 service nova] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Updating instance_info_cache with network_info: [{"id": "5b1da692-8443-4514-a168-fc8c34ae5b4a", "address": "fa:16:3e:a2:1a:4a", "network": {"id": "0655b1a1-e1ee-4efd-889c-0f72915310f5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1445509008-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b51b8195c4e7418cbdaa66aa5e5aff5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b1da692-84", "ovs_interfaceid": "5b1da692-8443-4514-a168-fc8c34ae5b4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1352.712364] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128d6f3f-d094-4a29-acdc-6331e2133a81 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.722112] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc2879ca-ad4a-4a0f-b90d-85bc33f5b2de {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.740132] env[69992]: DEBUG nova.compute.provider_tree [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1352.755994] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52999de2-3230-3601-7765-1e53eda0d15a, 'name': SearchDatastore_Task, 'duration_secs': 0.023361} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.755994] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1352.756403] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1352.756403] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1352.756513] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1352.756692] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1352.757404] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0d2ef3f9-0ec5-4c20-b7a9-12490636d146 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.768019] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1352.768019] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1352.768176] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-813c277d-9b4b-4699-a6f2-1225b558174f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.774255] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1352.774255] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52269b25-eafd-be60-4ff6-f81f44856e4e" [ 1352.774255] env[69992]: _type = "Task" [ 1352.774255] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.782217] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52269b25-eafd-be60-4ff6-f81f44856e4e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.812668] env[69992]: DEBUG nova.compute.manager [req-3bbaa2e5-8c29-48dd-ba3f-4c43072dde10 req-02f802b5-92cb-4b18-a8b6-1804d1d9fe3e service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Received event network-changed-1b550e88-755a-45a1-98fd-6fcb8fa4a7a8 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1352.812668] env[69992]: DEBUG nova.compute.manager [req-3bbaa2e5-8c29-48dd-ba3f-4c43072dde10 req-02f802b5-92cb-4b18-a8b6-1804d1d9fe3e service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Refreshing instance network info cache due to event network-changed-1b550e88-755a-45a1-98fd-6fcb8fa4a7a8. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1352.812756] env[69992]: DEBUG oslo_concurrency.lockutils [req-3bbaa2e5-8c29-48dd-ba3f-4c43072dde10 req-02f802b5-92cb-4b18-a8b6-1804d1d9fe3e service nova] Acquiring lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1352.813582] env[69992]: DEBUG oslo_concurrency.lockutils [req-3bbaa2e5-8c29-48dd-ba3f-4c43072dde10 req-02f802b5-92cb-4b18-a8b6-1804d1d9fe3e service nova] Acquired lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1352.813582] env[69992]: DEBUG nova.network.neutron [req-3bbaa2e5-8c29-48dd-ba3f-4c43072dde10 req-02f802b5-92cb-4b18-a8b6-1804d1d9fe3e service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Refreshing network info cache for port 1b550e88-755a-45a1-98fd-6fcb8fa4a7a8 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1353.003123] env[69992]: DEBUG oslo_concurrency.lockutils [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "refresh_cache-5c8b5f76-918a-44ac-b5b4-5f5f252da936" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.003123] env[69992]: DEBUG oslo_concurrency.lockutils [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquired lock "refresh_cache-5c8b5f76-918a-44ac-b5b4-5f5f252da936" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1353.003123] env[69992]: DEBUG nova.network.neutron [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1353.216844] env[69992]: DEBUG oslo_concurrency.lockutils [req-30f52f3a-9dd3-41f2-839d-19a04ea9d045 req-414b196f-516d-4be0-99ab-b3a70bf643a5 service nova] Releasing lock "refresh_cache-91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1353.245512] env[69992]: DEBUG nova.scheduler.client.report [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1353.286029] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': 
session[528eb7b7-6862-86e5-2686-6146916c3c70]52269b25-eafd-be60-4ff6-f81f44856e4e, 'name': SearchDatastore_Task, 'duration_secs': 0.009862} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.286835] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9760001-a925-49d7-a4b7-3e57030ddf0a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.291783] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1353.291783] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52ff68de-eaa2-feaf-71cb-2cc3e0ba3f38" [ 1353.291783] env[69992]: _type = "Task" [ 1353.291783] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.299218] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ff68de-eaa2-feaf-71cb-2cc3e0ba3f38, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.360502] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1353.363024] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1353.529540] env[69992]: DEBUG nova.network.neutron [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Port 2c7ae122-41e5-4605-a33e-4516dd1f5945 binding to destination host cpu-1 is already ACTIVE {{(pid=69992) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1353.529832] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "refresh_cache-1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.529973] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired lock "refresh_cache-1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1353.530146] env[69992]: DEBUG nova.network.neutron [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 
1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1353.550736] env[69992]: DEBUG nova.network.neutron [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1353.568107] env[69992]: DEBUG nova.network.neutron [req-3bbaa2e5-8c29-48dd-ba3f-4c43072dde10 req-02f802b5-92cb-4b18-a8b6-1804d1d9fe3e service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Updated VIF entry in instance network info cache for port 1b550e88-755a-45a1-98fd-6fcb8fa4a7a8. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1353.568487] env[69992]: DEBUG nova.network.neutron [req-3bbaa2e5-8c29-48dd-ba3f-4c43072dde10 req-02f802b5-92cb-4b18-a8b6-1804d1d9fe3e service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Updating instance_info_cache with network_info: [{"id": "1b550e88-755a-45a1-98fd-6fcb8fa4a7a8", "address": "fa:16:3e:14:a9:b3", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b550e88-75", "ovs_interfaceid": "1b550e88-755a-45a1-98fd-6fcb8fa4a7a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1353.736718] env[69992]: DEBUG nova.compute.manager [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1353.737661] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae290579-3291-435b-a289-f0813136862f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.753073] env[69992]: DEBUG oslo_concurrency.lockutils [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.909s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1353.755184] env[69992]: DEBUG 
oslo_concurrency.lockutils [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.903s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1353.757344] env[69992]: INFO nova.compute.claims [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1353.776402] env[69992]: INFO nova.scheduler.client.report [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Deleted allocations for instance 2b89e218-81cc-49fc-a80a-35dde48bdd5d [ 1353.780659] env[69992]: DEBUG nova.network.neutron [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Updating instance_info_cache with network_info: [{"id": "c41aefad-ecba-4fa1-ae2e-2586734ffa8a", "address": "fa:16:3e:ab:9f:f9", "network": {"id": "7b76ab15-e15a-4e22-ba38-bffeba7c4ff6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1461551189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4da04b8933ad4d2ba4b1c193853f31b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc41aefad-ec", "ovs_interfaceid": "c41aefad-ecba-4fa1-ae2e-2586734ffa8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1353.805845] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ff68de-eaa2-feaf-71cb-2cc3e0ba3f38, 'name': SearchDatastore_Task, 'duration_secs': 0.029639} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.806069] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1353.806353] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3/91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1353.806985] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc76fbfc-ace5-4692-b9ce-d87933f75453 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.813319] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1353.813319] env[69992]: value = "task-2897902" [ 1353.813319] env[69992]: _type = "Task" [ 1353.813319] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.821612] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897902, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.874483] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1353.874483] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1353.874483] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1353.874483] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1353.874483] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1353.874483] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1353.874483] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69992) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1353.874483] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1353.960644] env[69992]: DEBUG nova.compute.manager [req-b644242f-9803-42c9-b678-606f181cd4c7 req-fd3b2e82-880f-412b-8e72-8d14cc921e1b service nova] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Received event network-vif-plugged-c41aefad-ecba-4fa1-ae2e-2586734ffa8a {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1353.960905] env[69992]: DEBUG oslo_concurrency.lockutils [req-b644242f-9803-42c9-b678-606f181cd4c7 req-fd3b2e82-880f-412b-8e72-8d14cc921e1b service nova] Acquiring lock "5c8b5f76-918a-44ac-b5b4-5f5f252da936-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1353.961107] env[69992]: DEBUG oslo_concurrency.lockutils [req-b644242f-9803-42c9-b678-606f181cd4c7 req-fd3b2e82-880f-412b-8e72-8d14cc921e1b service nova] Lock "5c8b5f76-918a-44ac-b5b4-5f5f252da936-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1353.961297] env[69992]: DEBUG oslo_concurrency.lockutils [req-b644242f-9803-42c9-b678-606f181cd4c7 req-fd3b2e82-880f-412b-8e72-8d14cc921e1b service nova] Lock "5c8b5f76-918a-44ac-b5b4-5f5f252da936-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1353.961508] env[69992]: DEBUG nova.compute.manager [req-b644242f-9803-42c9-b678-606f181cd4c7 req-fd3b2e82-880f-412b-8e72-8d14cc921e1b service nova] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] No waiting events found dispatching network-vif-plugged-c41aefad-ecba-4fa1-ae2e-2586734ffa8a {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1353.961695] env[69992]: WARNING nova.compute.manager [req-b644242f-9803-42c9-b678-606f181cd4c7 req-fd3b2e82-880f-412b-8e72-8d14cc921e1b service nova] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Received unexpected event network-vif-plugged-c41aefad-ecba-4fa1-ae2e-2586734ffa8a for instance with vm_state building and task_state spawning. [ 1353.961859] env[69992]: DEBUG nova.compute.manager [req-b644242f-9803-42c9-b678-606f181cd4c7 req-fd3b2e82-880f-412b-8e72-8d14cc921e1b service nova] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Received event network-changed-c41aefad-ecba-4fa1-ae2e-2586734ffa8a {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1353.962030] env[69992]: DEBUG nova.compute.manager [req-b644242f-9803-42c9-b678-606f181cd4c7 req-fd3b2e82-880f-412b-8e72-8d14cc921e1b service nova] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Refreshing instance network info cache due to event network-changed-c41aefad-ecba-4fa1-ae2e-2586734ffa8a. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1353.962206] env[69992]: DEBUG oslo_concurrency.lockutils [req-b644242f-9803-42c9-b678-606f181cd4c7 req-fd3b2e82-880f-412b-8e72-8d14cc921e1b service nova] Acquiring lock "refresh_cache-5c8b5f76-918a-44ac-b5b4-5f5f252da936" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1354.073264] env[69992]: DEBUG oslo_concurrency.lockutils [req-3bbaa2e5-8c29-48dd-ba3f-4c43072dde10 req-02f802b5-92cb-4b18-a8b6-1804d1d9fe3e service nova] Releasing lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1354.248807] env[69992]: INFO nova.compute.manager [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] instance snapshotting [ 1354.251628] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3414d3-102b-4ca9-90fc-eb73beb9693a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.282907] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a95d26-4c6a-4df0-abed-4e9460a2abc6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.289091] env[69992]: DEBUG oslo_concurrency.lockutils [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Releasing lock "refresh_cache-5c8b5f76-918a-44ac-b5b4-5f5f252da936" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1354.289091] env[69992]: DEBUG nova.compute.manager [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Instance network_info: |[{"id": "c41aefad-ecba-4fa1-ae2e-2586734ffa8a", "address": "fa:16:3e:ab:9f:f9", "network": {"id": "7b76ab15-e15a-4e22-ba38-bffeba7c4ff6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1461551189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4da04b8933ad4d2ba4b1c193853f31b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc41aefad-ec", "ovs_interfaceid": "c41aefad-ecba-4fa1-ae2e-2586734ffa8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1354.289482] env[69992]: DEBUG 
oslo_concurrency.lockutils [None req-928ee948-8347-44a5-bba9-6332fe725808 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "2b89e218-81cc-49fc-a80a-35dde48bdd5d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.567s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1354.290412] env[69992]: DEBUG oslo_concurrency.lockutils [req-b644242f-9803-42c9-b678-606f181cd4c7 req-fd3b2e82-880f-412b-8e72-8d14cc921e1b service nova] Acquired lock "refresh_cache-5c8b5f76-918a-44ac-b5b4-5f5f252da936" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1354.290633] env[69992]: DEBUG nova.network.neutron [req-b644242f-9803-42c9-b678-606f181cd4c7 req-fd3b2e82-880f-412b-8e72-8d14cc921e1b service nova] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Refreshing network info cache for port c41aefad-ecba-4fa1-ae2e-2586734ffa8a {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1354.291692] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:9f:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7b83383f-ed7a-4efd-aef7-aa8c15649d07', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c41aefad-ecba-4fa1-ae2e-2586734ffa8a', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1354.300623] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1354.302591] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1354.304449] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7ddc1ae3-4663-4ae0-b44b-c1d69fcd3f2e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.336558] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897902, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.338318] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1354.338318] env[69992]: value = "task-2897903" [ 1354.338318] env[69992]: _type = "Task" [ 1354.338318] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.349334] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897903, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.377566] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1354.663523] env[69992]: DEBUG nova.network.neutron [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Updating instance_info_cache with network_info: [{"id": "2c7ae122-41e5-4605-a33e-4516dd1f5945", "address": "fa:16:3e:41:be:e4", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c7ae122-41", "ovs_interfaceid": "2c7ae122-41e5-4605-a33e-4516dd1f5945", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.828283] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Creating Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1354.828557] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e24fff14-a268-4379-8f31-d419c7caaef9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.839225] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897902, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.844383} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.839554] env[69992]: DEBUG oslo_vmware.api [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1354.839554] env[69992]: value = "task-2897904" [ 1354.839554] env[69992]: _type = "Task" [ 1354.839554] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.841868] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3/91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1354.842120] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1354.845895] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e2c626dc-9172-409e-b823-3c11505b7878 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.862637] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897903, 'name': CreateVM_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.868814] env[69992]: DEBUG oslo_vmware.api [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897904, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.869353] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1354.869353] env[69992]: value = "task-2897905" [ 1354.869353] env[69992]: _type = "Task" [ 1354.869353] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.885771] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897905, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.927355] env[69992]: DEBUG nova.compute.manager [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Received event network-changed-c35bf17a-173c-4013-b8e4-85b2415e8860 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1354.927539] env[69992]: DEBUG nova.compute.manager [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Refreshing instance network info cache due to event network-changed-c35bf17a-173c-4013-b8e4-85b2415e8860. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1354.927753] env[69992]: DEBUG oslo_concurrency.lockutils [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] Acquiring lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1354.927894] env[69992]: DEBUG oslo_concurrency.lockutils [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] Acquired lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1354.928063] env[69992]: DEBUG nova.network.neutron [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Refreshing network info cache for port c35bf17a-173c-4013-b8e4-85b2415e8860 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1355.054386] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67445d15-6fee-4c56-9d36-a5882074a2c0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.067665] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3766324-99f7-489d-9c43-238f8a4ea2e1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.100251] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-856e62ba-6517-4c30-a04a-b0284ac8cebd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.108170] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-240771b2-71bf-4a2b-80f7-9f0343f7ffbb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.115392] env[69992]: DEBUG nova.network.neutron [req-b644242f-9803-42c9-b678-606f181cd4c7 req-fd3b2e82-880f-412b-8e72-8d14cc921e1b service nova] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Updated VIF entry in instance network info cache for port c41aefad-ecba-4fa1-ae2e-2586734ffa8a. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1355.115392] env[69992]: DEBUG nova.network.neutron [req-b644242f-9803-42c9-b678-606f181cd4c7 req-fd3b2e82-880f-412b-8e72-8d14cc921e1b service nova] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Updating instance_info_cache with network_info: [{"id": "c41aefad-ecba-4fa1-ae2e-2586734ffa8a", "address": "fa:16:3e:ab:9f:f9", "network": {"id": "7b76ab15-e15a-4e22-ba38-bffeba7c4ff6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1461551189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4da04b8933ad4d2ba4b1c193853f31b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc41aefad-ec", "ovs_interfaceid": "c41aefad-ecba-4fa1-ae2e-2586734ffa8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1355.126332] env[69992]: DEBUG nova.compute.provider_tree [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1355.169581] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Releasing lock "refresh_cache-1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1355.246148] env[69992]: DEBUG oslo_concurrency.lockutils [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "interface-6ccc70f5-4857-4af3-99a1-f60ec35aebaf-3d571c52-27cf-411e-86f3-279b842e93ca" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1355.246446] env[69992]: DEBUG oslo_concurrency.lockutils [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "interface-6ccc70f5-4857-4af3-99a1-f60ec35aebaf-3d571c52-27cf-411e-86f3-279b842e93ca" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1355.246917] env[69992]: DEBUG nova.objects.instance [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 
tempest-AttachInterfacesTestJSON-1431277975-project-member] Lazy-loading 'flavor' on Instance uuid 6ccc70f5-4857-4af3-99a1-f60ec35aebaf {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1355.355662] env[69992]: DEBUG oslo_vmware.api [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897904, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.358973] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897903, 'name': CreateVM_Task, 'duration_secs': 0.859876} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.359254] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1355.359992] env[69992]: DEBUG oslo_concurrency.lockutils [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1355.360221] env[69992]: DEBUG oslo_concurrency.lockutils [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1355.360613] env[69992]: DEBUG oslo_concurrency.lockutils [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1355.360983] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09c79ed1-3629-437f-bcda-4470db7393e8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.365159] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1355.365159] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528c03cc-fd0c-8640-4bef-5d5ff24e902c" [ 1355.365159] env[69992]: _type = "Task" [ 1355.365159] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.374236] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528c03cc-fd0c-8640-4bef-5d5ff24e902c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.381272] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897905, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068453} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.381618] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1355.382402] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8faa043-9ae0-43a7-8fbe-5ef5ee9cd6f8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.404758] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3/91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1355.405384] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f3d9c69-b14c-4776-884f-0efb461ce4ec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.425504] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1355.425504] env[69992]: value = "task-2897906" [ 1355.425504] env[69992]: _type = "Task" [ 1355.425504] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.435446] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897906, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.617562] env[69992]: DEBUG oslo_concurrency.lockutils [req-b644242f-9803-42c9-b678-606f181cd4c7 req-fd3b2e82-880f-412b-8e72-8d14cc921e1b service nova] Releasing lock "refresh_cache-5c8b5f76-918a-44ac-b5b4-5f5f252da936" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1355.630530] env[69992]: DEBUG nova.scheduler.client.report [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1355.674931] env[69992]: DEBUG nova.network.neutron [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Updated VIF entry in instance network info cache for port c35bf17a-173c-4013-b8e4-85b2415e8860. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1355.675325] env[69992]: DEBUG nova.network.neutron [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Updating instance_info_cache with network_info: [{"id": "c35bf17a-173c-4013-b8e4-85b2415e8860", "address": "fa:16:3e:e2:6a:23", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc35bf17a-17", "ovs_interfaceid": "c35bf17a-173c-4013-b8e4-85b2415e8860", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1355.677369] env[69992]: DEBUG nova.compute.manager [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=69992) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1355.841703] env[69992]: 
DEBUG nova.objects.instance [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lazy-loading 'pci_requests' on Instance uuid 6ccc70f5-4857-4af3-99a1-f60ec35aebaf {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1355.852257] env[69992]: DEBUG oslo_vmware.api [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897904, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.874818] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528c03cc-fd0c-8640-4bef-5d5ff24e902c, 'name': SearchDatastore_Task, 'duration_secs': 0.029021} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.875101] env[69992]: DEBUG oslo_concurrency.lockutils [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1355.875339] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1355.875576] env[69992]: DEBUG oslo_concurrency.lockutils [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1355.875724] env[69992]: DEBUG oslo_concurrency.lockutils [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1355.875903] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1355.876165] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-08ad0e47-3545-4670-bb11-f84585101c9a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.884735] env[69992]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1355.884913] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1355.885804] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98d1d51f-0614-4acb-9559-789a752c952a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.890336] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1355.890336] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]522c298a-8883-e03a-2516-a2a4e6e5e84b" [ 1355.890336] env[69992]: _type = "Task" [ 1355.890336] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.897512] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]522c298a-8883-e03a-2516-a2a4e6e5e84b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.934639] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897906, 'name': ReconfigVM_Task, 'duration_secs': 0.267277} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.934925] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3/91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1355.935569] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-38b598f6-4aac-4fcf-8c93-d907b43cd920 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.941695] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1355.941695] env[69992]: value = "task-2897907" [ 1355.941695] env[69992]: _type = "Task" [ 1355.941695] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.949459] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897907, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.136125] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.381s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1356.136646] env[69992]: DEBUG nova.compute.manager [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1356.139509] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.530s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1356.139698] env[69992]: DEBUG nova.objects.instance [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lazy-loading 'resources' on Instance uuid cc8a809a-1a3b-4dad-a74b-d2f8d267b476 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1356.180548] env[69992]: DEBUG oslo_concurrency.lockutils [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] Releasing lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1356.180836] env[69992]: DEBUG nova.compute.manager [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Received event network-changed-c35bf17a-173c-4013-b8e4-85b2415e8860 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1356.181018] env[69992]: DEBUG nova.compute.manager [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Refreshing instance network info cache due to event network-changed-c35bf17a-173c-4013-b8e4-85b2415e8860. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1356.181235] env[69992]: DEBUG oslo_concurrency.lockutils [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] Acquiring lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1356.181380] env[69992]: DEBUG oslo_concurrency.lockutils [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] Acquired lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1356.181637] env[69992]: DEBUG nova.network.neutron [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Refreshing network info cache for port c35bf17a-173c-4013-b8e4-85b2415e8860 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1356.344148] env[69992]: DEBUG nova.objects.base [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Object Instance<6ccc70f5-4857-4af3-99a1-f60ec35aebaf> lazy-loaded attributes: flavor,pci_requests {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1356.344392] env[69992]: DEBUG nova.network.neutron [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1356.356898] env[69992]: DEBUG oslo_vmware.api [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897904, 'name': CreateSnapshot_Task, 'duration_secs': 1.395058} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.357178] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Created Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1356.357909] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b7199fd-cb67-488a-9ada-be0b516c0c74 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.400796] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]522c298a-8883-e03a-2516-a2a4e6e5e84b, 'name': SearchDatastore_Task, 'duration_secs': 0.010289} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.401623] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66654f41-25bf-4938-bfe6-42db6a1cd87a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.406884] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1356.406884] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52c2aa4f-f3ce-7ba3-ed6c-88fbb079ca5d" [ 1356.406884] env[69992]: _type = "Task" [ 1356.406884] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.414598] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c2aa4f-f3ce-7ba3-ed6c-88fbb079ca5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.415999] env[69992]: DEBUG nova.policy [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd0f7a6e9a76342a1a4fd39a8b21a31d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dc6fa4e45f4c47c49d67e6efe2eb7a50', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1356.450632] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897907, 'name': Rename_Task, 'duration_secs': 0.14313} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.450894] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1356.451137] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-040adcd1-5aae-4043-8470-8f5cc3b9251d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.457159] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1356.457159] env[69992]: value = "task-2897908" [ 1356.457159] env[69992]: _type = "Task" [ 1356.457159] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.465750] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897908, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.534961] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquiring lock "57702674-4c96-4577-a93f-24ecffebb3a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1356.535229] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "57702674-4c96-4577-a93f-24ecffebb3a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1356.535498] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquiring lock "57702674-4c96-4577-a93f-24ecffebb3a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1356.535878] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "57702674-4c96-4577-a93f-24ecffebb3a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1356.536504] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "57702674-4c96-4577-a93f-24ecffebb3a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1356.538365] env[69992]: INFO nova.compute.manager [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Terminating instance [ 1356.642708] env[69992]: DEBUG nova.compute.utils [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1356.647864] env[69992]: DEBUG nova.compute.manager [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 
tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Not allocating networking since 'none' was specified. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1356.775664] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1356.874962] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Creating linked-clone VM from snapshot {{(pid=69992) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1356.879912] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8df8a52e-3f1e-4cd6-8ef5-2150887d54a6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.889356] env[69992]: DEBUG oslo_vmware.api [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1356.889356] env[69992]: value = "task-2897909" [ 1356.889356] env[69992]: _type = "Task" [ 1356.889356] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.894214] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-081a3afc-7dce-4293-9da2-39b84184c0c1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.901365] env[69992]: DEBUG oslo_vmware.api [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897909, 'name': CloneVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.903952] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4afe003b-d50c-434c-a1c6-655b5707b933 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.936266] env[69992]: DEBUG nova.network.neutron [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Updated VIF entry in instance network info cache for port c35bf17a-173c-4013-b8e4-85b2415e8860. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1356.936642] env[69992]: DEBUG nova.network.neutron [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Updating instance_info_cache with network_info: [{"id": "c35bf17a-173c-4013-b8e4-85b2415e8860", "address": "fa:16:3e:e2:6a:23", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc35bf17a-17", "ovs_interfaceid": "c35bf17a-173c-4013-b8e4-85b2415e8860", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1356.938731] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-055a428b-cd52-4b80-b979-5b4100950e97 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.946649] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c2aa4f-f3ce-7ba3-ed6c-88fbb079ca5d, 'name': SearchDatastore_Task, 'duration_secs': 0.019202} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.947878] env[69992]: DEBUG oslo_concurrency.lockutils [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1356.948230] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 5c8b5f76-918a-44ac-b5b4-5f5f252da936/5c8b5f76-918a-44ac-b5b4-5f5f252da936.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1356.948750] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-47d82a20-481b-4ee6-8d32-8cf27f1ffe2c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.954661] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37208af-4a51-4d68-b9df-e556f8922b21 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.959979] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1356.959979] env[69992]: value = "task-2897910" [ 1356.959979] env[69992]: _type = "Task" [ 1356.959979] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.974399] env[69992]: DEBUG nova.compute.provider_tree [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1356.978744] env[69992]: DEBUG oslo_vmware.api [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897908, 'name': PowerOnVM_Task, 'duration_secs': 0.43186} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.979250] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1356.979478] env[69992]: INFO nova.compute.manager [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Took 7.32 seconds to spawn the instance on the hypervisor. 
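
The task flow above (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows oslo.vmware's invoke-then-poll pattern, which is what produces the repeated "Waiting for the task ...", "_poll_task ... progress is N%" and "completed successfully" lines. A minimal illustrative sketch of that calling pattern, not code from this run: it assumes an oslo_vmware.api.VMwareAPISession as `session`, and `vm_folder_ref`, `config_spec` and `res_pool_ref` are hypothetical managed-object references / spec objects.

def create_vm_and_wait(session, vm_folder_ref, config_spec, res_pool_ref):
    # Invoking the vSphere call returns a Task managed-object reference,
    # i.e. the "task-28979xx" values logged above.
    task_ref = session.invoke_api(session.vim, 'CreateVM_Task',
                                  vm_folder_ref,
                                  config=config_spec,
                                  pool=res_pool_ref)
    # wait_for_task() polls the task's TaskInfo on an interval (the
    # "_poll_task ... progress is N%" DEBUG lines) and raises an oslo.vmware
    # exception if the task finishes in an error state.
    task_info = session.wait_for_task(task_ref)
    return task_info.result  # for CreateVM_Task, the new VM's moref
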
[ 1356.979655] env[69992]: DEBUG nova.compute.manager [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1356.980457] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f2b0e7-8e30-41f4-bcec-504b44b3714f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.986778] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897910, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.042879] env[69992]: DEBUG nova.compute.manager [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1357.043282] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1357.044652] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b77226a8-ba36-41b0-b8a7-dc6f0a32cc3c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.055264] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1357.055565] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-774e7f7c-f614-4180-a39e-4beea82c20b5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.062314] env[69992]: DEBUG oslo_vmware.api [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1357.062314] env[69992]: value = "task-2897911" [ 1357.062314] env[69992]: _type = "Task" [ 1357.062314] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.070334] env[69992]: DEBUG oslo_vmware.api [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897911, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.149044] env[69992]: DEBUG nova.compute.manager [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1357.404241] env[69992]: DEBUG oslo_vmware.api [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897909, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.443179] env[69992]: DEBUG oslo_concurrency.lockutils [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] Releasing lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1357.443493] env[69992]: DEBUG nova.compute.manager [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Received event network-changed-1b550e88-755a-45a1-98fd-6fcb8fa4a7a8 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1357.443688] env[69992]: DEBUG nova.compute.manager [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Refreshing instance network info cache due to event network-changed-1b550e88-755a-45a1-98fd-6fcb8fa4a7a8. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1357.443932] env[69992]: DEBUG oslo_concurrency.lockutils [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] Acquiring lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1357.444133] env[69992]: DEBUG oslo_concurrency.lockutils [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] Acquired lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1357.444340] env[69992]: DEBUG nova.network.neutron [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Refreshing network info cache for port 1b550e88-755a-45a1-98fd-6fcb8fa4a7a8 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1357.473922] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897910, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496465} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.474261] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 5c8b5f76-918a-44ac-b5b4-5f5f252da936/5c8b5f76-918a-44ac-b5b4-5f5f252da936.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1357.474495] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1357.474801] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-263cae28-247f-46c3-9339-2246f4ce89f9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.480924] env[69992]: DEBUG nova.scheduler.client.report [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1357.485984] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1357.485984] env[69992]: value = "task-2897912" [ 1357.485984] env[69992]: _type = "Task" [ 1357.485984] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.500820] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897912, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.504863] env[69992]: INFO nova.compute.manager [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Took 25.29 seconds to build instance. [ 1357.574141] env[69992]: DEBUG oslo_vmware.api [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897911, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.900747] env[69992]: DEBUG oslo_vmware.api [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897909, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.987382] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.848s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1357.990783] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.135s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1357.991388] env[69992]: DEBUG nova.objects.instance [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lazy-loading 'resources' on Instance uuid 08869f38-9609-4f7f-9110-2f26fd1cb3f7 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1358.002606] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897912, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069792} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.005331] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1358.007063] env[69992]: DEBUG nova.compute.manager [req-62e61d02-1ffc-448a-8751-36290c28e713 req-b32d3a30-ce7f-4630-83d0-ccda0bb1f66a service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Received event network-vif-plugged-3d571c52-27cf-411e-86f3-279b842e93ca {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1358.007600] env[69992]: DEBUG oslo_concurrency.lockutils [req-62e61d02-1ffc-448a-8751-36290c28e713 req-b32d3a30-ce7f-4630-83d0-ccda0bb1f66a service nova] Acquiring lock "6ccc70f5-4857-4af3-99a1-f60ec35aebaf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1358.008007] env[69992]: DEBUG oslo_concurrency.lockutils [req-62e61d02-1ffc-448a-8751-36290c28e713 req-b32d3a30-ce7f-4630-83d0-ccda0bb1f66a service nova] Lock "6ccc70f5-4857-4af3-99a1-f60ec35aebaf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1358.009274] env[69992]: DEBUG oslo_concurrency.lockutils [req-62e61d02-1ffc-448a-8751-36290c28e713 req-b32d3a30-ce7f-4630-83d0-ccda0bb1f66a service nova] Lock "6ccc70f5-4857-4af3-99a1-f60ec35aebaf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1358.009274] env[69992]: DEBUG nova.compute.manager [req-62e61d02-1ffc-448a-8751-36290c28e713 req-b32d3a30-ce7f-4630-83d0-ccda0bb1f66a service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] No waiting events found dispatching network-vif-plugged-3d571c52-27cf-411e-86f3-279b842e93ca {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1358.009274] env[69992]: WARNING nova.compute.manager [req-62e61d02-1ffc-448a-8751-36290c28e713 req-b32d3a30-ce7f-4630-83d0-ccda0bb1f66a service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Received unexpected event network-vif-plugged-3d571c52-27cf-411e-86f3-279b842e93ca for instance with vm_state active and task_state None. 
[ 1358.010088] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-572e7206-4eb5-40e2-a9dc-31e430211422 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.013582] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fc4009b0-881b-4bf3-bce2-43bdbec67dcd tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.818s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1358.014559] env[69992]: INFO nova.scheduler.client.report [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Deleted allocations for instance cc8a809a-1a3b-4dad-a74b-d2f8d267b476 [ 1358.038789] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 5c8b5f76-918a-44ac-b5b4-5f5f252da936/5c8b5f76-918a-44ac-b5b4-5f5f252da936.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1358.042183] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22a162a2-7513-47dc-bfda-ea40e55c499e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.064674] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1358.064674] env[69992]: value = "task-2897913" [ 1358.064674] env[69992]: _type = "Task" [ 1358.064674] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.075780] env[69992]: DEBUG oslo_vmware.api [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897911, 'name': PowerOffVM_Task, 'duration_secs': 0.515966} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.078760] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1358.078944] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1358.079505] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897913, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.079730] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-203a9617-193f-4e58-857c-e426fcaec57e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.100398] env[69992]: DEBUG nova.network.neutron [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Successfully updated port: 3d571c52-27cf-411e-86f3-279b842e93ca {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1358.157777] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1358.157777] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1358.157777] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Deleting the datastore file [datastore1] 57702674-4c96-4577-a93f-24ecffebb3a7 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1358.158961] env[69992]: DEBUG nova.compute.manager [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1358.160896] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a8f3f8e-d82b-472d-b980-779538f49cb1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.168926] env[69992]: DEBUG oslo_vmware.api [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for the task: (returnval){ [ 1358.168926] env[69992]: value = "task-2897915" [ 1358.168926] env[69992]: _type = "Task" [ 1358.168926] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.177239] env[69992]: DEBUG oslo_vmware.api [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897915, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.191351] env[69992]: DEBUG nova.virt.hardware [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1358.191796] env[69992]: DEBUG nova.virt.hardware [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1358.192097] env[69992]: DEBUG nova.virt.hardware [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1358.192390] env[69992]: DEBUG nova.virt.hardware [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1358.192639] env[69992]: DEBUG nova.virt.hardware [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1358.192889] env[69992]: DEBUG nova.virt.hardware [None 
req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1358.193214] env[69992]: DEBUG nova.virt.hardware [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1358.193495] env[69992]: DEBUG nova.virt.hardware [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1358.193769] env[69992]: DEBUG nova.virt.hardware [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1358.194048] env[69992]: DEBUG nova.virt.hardware [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1358.194361] env[69992]: DEBUG nova.virt.hardware [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1358.195584] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ce4474-3197-4b2b-8062-398d446f2a5d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.204179] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80973b33-3a9a-46cd-b944-9ad8b52e7b15 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.219877] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Instance VIF info [] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1358.225949] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Creating folder: Project (a08c5599d7de4337a5cb4d0227152aca). Parent ref: group-v581821. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1358.226582] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-88cc89ed-6730-4d4b-ab49-08f7f806af33 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.231027] env[69992]: DEBUG nova.network.neutron [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Updated VIF entry in instance network info cache for port 1b550e88-755a-45a1-98fd-6fcb8fa4a7a8. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1358.231533] env[69992]: DEBUG nova.network.neutron [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Updating instance_info_cache with network_info: [{"id": "1b550e88-755a-45a1-98fd-6fcb8fa4a7a8", "address": "fa:16:3e:14:a9:b3", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b550e88-75", "ovs_interfaceid": "1b550e88-755a-45a1-98fd-6fcb8fa4a7a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1358.235923] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Created folder: Project (a08c5599d7de4337a5cb4d0227152aca) in parent group-v581821. [ 1358.236041] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Creating folder: Instances. Parent ref: group-v582103. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1358.236297] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4080d56b-822d-4a31-9fd6-5381b2a338d7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.245267] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Created folder: Instances in parent group-v582103. 
[ 1358.245413] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1358.245633] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1358.245776] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1dd164a8-6473-4ea5-af63-456349650269 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.264443] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1358.264443] env[69992]: value = "task-2897918" [ 1358.264443] env[69992]: _type = "Task" [ 1358.264443] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.272062] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897918, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.401822] env[69992]: DEBUG oslo_vmware.api [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897909, 'name': CloneVM_Task, 'duration_secs': 1.267236} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.402175] env[69992]: INFO nova.virt.vmwareapi.vmops [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Created linked-clone VM from snapshot [ 1358.403080] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46637cb2-bbe8-4c05-9216-eee838405be6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.411643] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Uploading image eb34cd4b-f844-48db-8e4f-b669483d8f75 {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1358.423239] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Destroying the VM {{(pid=69992) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1358.423544] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-16cacbee-2e83-4481-94a9-65a654ec73df {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.429634] env[69992]: DEBUG oslo_vmware.api [None 
req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1358.429634] env[69992]: value = "task-2897919" [ 1358.429634] env[69992]: _type = "Task" [ 1358.429634] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.437497] env[69992]: DEBUG oslo_vmware.api [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897919, 'name': Destroy_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.498721] env[69992]: DEBUG nova.objects.instance [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lazy-loading 'numa_topology' on Instance uuid 08869f38-9609-4f7f-9110-2f26fd1cb3f7 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1358.522888] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4166c9a-5e73-45a6-8b36-7c88ce5df80f tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "cc8a809a-1a3b-4dad-a74b-d2f8d267b476" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.387s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1358.577457] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897913, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.605308] env[69992]: DEBUG oslo_concurrency.lockutils [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1358.678316] env[69992]: DEBUG oslo_vmware.api [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Task: {'id': task-2897915, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.292584} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.678567] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1358.678756] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1358.678933] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1358.679119] env[69992]: INFO nova.compute.manager [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1358.679379] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1358.679716] env[69992]: DEBUG nova.compute.manager [-] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1358.679846] env[69992]: DEBUG nova.network.neutron [-] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1358.734431] env[69992]: DEBUG oslo_concurrency.lockutils [req-6446bd84-b1da-460a-8e1b-841ed45d1def req-0e374b72-5d0b-4f24-ab98-1c9e2f5093a7 service nova] Releasing lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1358.734939] env[69992]: DEBUG oslo_concurrency.lockutils [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1358.735173] env[69992]: DEBUG nova.network.neutron [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1358.774242] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897918, 'name': CreateVM_Task, 'duration_secs': 0.297092} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.774396] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1358.774773] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1358.774934] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1358.775271] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1358.775523] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfe08133-b87e-4eac-a055-5ccbf28f4e79 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.780158] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for the task: (returnval){ [ 1358.780158] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52487492-5b59-df7f-72fa-fd32eb207e2b" [ 1358.780158] env[69992]: _type = "Task" [ 1358.780158] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.788566] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52487492-5b59-df7f-72fa-fd32eb207e2b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.939354] env[69992]: DEBUG oslo_vmware.api [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897919, 'name': Destroy_Task} progress is 33%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.003084] env[69992]: DEBUG nova.objects.base [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Object Instance<08869f38-9609-4f7f-9110-2f26fd1cb3f7> lazy-loaded attributes: resources,numa_topology {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1359.080664] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897913, 'name': ReconfigVM_Task, 'duration_secs': 0.517243} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.083341] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 5c8b5f76-918a-44ac-b5b4-5f5f252da936/5c8b5f76-918a-44ac-b5b4-5f5f252da936.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1359.084175] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d5247c8f-ca42-4c9b-b5f7-0ca9cd9bade4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.091236] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1359.091236] env[69992]: value = "task-2897920" [ 1359.091236] env[69992]: _type = "Task" [ 1359.091236] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.104065] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897920, 'name': Rename_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.250345] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4527f560-7155-4f6f-a405-89bc5ccfce7a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.258643] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4f4fb6-f81b-4451-b7a9-0d1c9f72acab {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.290874] env[69992]: WARNING nova.network.neutron [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] 7c8e9b14-bcc2-45f2-8b37-5f478b75057e already exists in list: networks containing: ['7c8e9b14-bcc2-45f2-8b37-5f478b75057e']. 
ignoring it [ 1359.296395] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be3d79c2-578d-4669-9b7f-cef3c37cf7bf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.305366] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52487492-5b59-df7f-72fa-fd32eb207e2b, 'name': SearchDatastore_Task, 'duration_secs': 0.025243} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.307458] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1359.307624] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1359.307864] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.308025] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1359.308245] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1359.308516] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d00170c2-93d8-403f-9532-10f1f414752f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.311177] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b659df5f-59b4-43e6-98d6-043a487a84a9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.326019] env[69992]: DEBUG nova.compute.provider_tree [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Inventory has not changed in ProviderTree for 
provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1359.331338] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1359.331338] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1359.331338] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3615d4e4-c79b-4a09-a757-9d90c7befc4a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.337467] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for the task: (returnval){ [ 1359.337467] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52fde5d9-a3df-0b32-5d07-6dc90727e8a4" [ 1359.337467] env[69992]: _type = "Task" [ 1359.337467] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.347357] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52fde5d9-a3df-0b32-5d07-6dc90727e8a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.439519] env[69992]: DEBUG oslo_vmware.api [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897919, 'name': Destroy_Task, 'duration_secs': 0.697461} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.439645] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Destroyed the VM [ 1359.439864] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Deleting Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1359.440134] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0b7fdbfb-925c-45c9-ae15-d7579a01a3b2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.446705] env[69992]: DEBUG oslo_vmware.api [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1359.446705] env[69992]: value = "task-2897921" [ 1359.446705] env[69992]: _type = "Task" [ 1359.446705] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.454671] env[69992]: DEBUG oslo_vmware.api [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897921, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.560081] env[69992]: DEBUG oslo_concurrency.lockutils [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1359.560359] env[69992]: DEBUG oslo_concurrency.lockutils [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1359.560608] env[69992]: DEBUG oslo_concurrency.lockutils [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1359.560872] env[69992]: DEBUG oslo_concurrency.lockutils [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1359.561116] env[69992]: DEBUG oslo_concurrency.lockutils [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1359.563806] env[69992]: INFO nova.compute.manager [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Terminating instance [ 1359.584477] env[69992]: DEBUG nova.network.neutron [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Updating instance_info_cache with network_info: [{"id": "1b550e88-755a-45a1-98fd-6fcb8fa4a7a8", "address": "fa:16:3e:14:a9:b3", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b550e88-75", "ovs_interfaceid": "1b550e88-755a-45a1-98fd-6fcb8fa4a7a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3d571c52-27cf-411e-86f3-279b842e93ca", "address": "fa:16:3e:5b:0e:7b", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d571c52-27", "ovs_interfaceid": "3d571c52-27cf-411e-86f3-279b842e93ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1359.601078] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897920, 'name': Rename_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.662258] env[69992]: DEBUG nova.network.neutron [-] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1359.831375] env[69992]: DEBUG nova.scheduler.client.report [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1359.847494] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52fde5d9-a3df-0b32-5d07-6dc90727e8a4, 'name': SearchDatastore_Task, 'duration_secs': 0.009123} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.848893] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56def084-0dd0-451a-aaca-0e06efd1457f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.854724] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for the task: (returnval){ [ 1359.854724] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5245e7c8-3a98-6500-b0d1-739f5388c5ad" [ 1359.854724] env[69992]: _type = "Task" [ 1359.854724] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.863059] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5245e7c8-3a98-6500-b0d1-739f5388c5ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.883196] env[69992]: DEBUG nova.compute.manager [req-2e3e8621-c246-4e61-8646-80d97b56cfdb req-678e27dc-2401-4fce-a359-312dd02e5464 service nova] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Received event network-vif-deleted-942293fd-c866-4331-b9d4-f667536a039b {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1359.956753] env[69992]: DEBUG oslo_vmware.api [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897921, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.068802] env[69992]: DEBUG nova.compute.manager [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1360.068802] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1360.069608] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-662df2d9-3088-4b57-b326-77318729e06e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.077409] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1360.077677] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c2ad34de-1200-4786-bd57-dfd1423d4873 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.083607] env[69992]: DEBUG oslo_vmware.api [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1360.083607] env[69992]: value = "task-2897922" [ 1360.083607] env[69992]: _type = "Task" [ 1360.083607] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.086966] env[69992]: DEBUG oslo_concurrency.lockutils [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1360.087569] env[69992]: DEBUG oslo_concurrency.lockutils [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1360.087727] env[69992]: DEBUG oslo_concurrency.lockutils [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1360.088432] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b374628-42ab-408c-b688-eb1a6eb558a5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.095484] env[69992]: DEBUG oslo_vmware.api [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897922, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.110674] env[69992]: DEBUG nova.virt.hardware [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1360.110939] env[69992]: DEBUG nova.virt.hardware [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1360.111114] env[69992]: DEBUG nova.virt.hardware [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1360.111303] env[69992]: DEBUG nova.virt.hardware [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Flavor 
pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1360.111451] env[69992]: DEBUG nova.virt.hardware [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1360.111625] env[69992]: DEBUG nova.virt.hardware [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1360.111843] env[69992]: DEBUG nova.virt.hardware [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1360.112040] env[69992]: DEBUG nova.virt.hardware [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1360.112335] env[69992]: DEBUG nova.virt.hardware [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1360.113185] env[69992]: DEBUG nova.virt.hardware [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1360.113185] env[69992]: DEBUG nova.virt.hardware [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1360.122304] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Reconfiguring VM to attach interface {{(pid=69992) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1360.123101] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41c746e8-a1d7-41f1-92d1-b3d90260cb09 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.141259] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897920, 'name': Rename_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.142588] env[69992]: DEBUG oslo_vmware.api [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1360.142588] env[69992]: value = "task-2897923" [ 1360.142588] env[69992]: _type = "Task" [ 1360.142588] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.151249] env[69992]: DEBUG oslo_vmware.api [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897923, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.165075] env[69992]: INFO nova.compute.manager [-] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Took 1.49 seconds to deallocate network for instance. [ 1360.336257] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.345s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1360.339400] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.288s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1360.341018] env[69992]: INFO nova.compute.claims [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1360.369151] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5245e7c8-3a98-6500-b0d1-739f5388c5ad, 'name': SearchDatastore_Task, 'duration_secs': 0.038626} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.370032] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1360.370032] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] b72eb094-b0fa-4e6f-bc29-c110692c7204/b72eb094-b0fa-4e6f-bc29-c110692c7204.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1360.370541] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e18d6be-5358-4634-8a05-7b788104bb3c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.379958] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for the task: (returnval){ [ 1360.379958] env[69992]: value = "task-2897924" [ 1360.379958] env[69992]: _type = "Task" [ 1360.379958] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.391220] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897924, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.457030] env[69992]: DEBUG oslo_vmware.api [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897921, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.523690] env[69992]: DEBUG nova.compute.manager [req-d9c86ca3-c169-40f0-bc01-4da5ea3bcb5c req-8c0fb07a-eaa9-48c5-a8c8-6c0797ee4a5d service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Received event network-changed-3d571c52-27cf-411e-86f3-279b842e93ca {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1360.523917] env[69992]: DEBUG nova.compute.manager [req-d9c86ca3-c169-40f0-bc01-4da5ea3bcb5c req-8c0fb07a-eaa9-48c5-a8c8-6c0797ee4a5d service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Refreshing instance network info cache due to event network-changed-3d571c52-27cf-411e-86f3-279b842e93ca. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1360.524171] env[69992]: DEBUG oslo_concurrency.lockutils [req-d9c86ca3-c169-40f0-bc01-4da5ea3bcb5c req-8c0fb07a-eaa9-48c5-a8c8-6c0797ee4a5d service nova] Acquiring lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1360.524351] env[69992]: DEBUG oslo_concurrency.lockutils [req-d9c86ca3-c169-40f0-bc01-4da5ea3bcb5c req-8c0fb07a-eaa9-48c5-a8c8-6c0797ee4a5d service nova] Acquired lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1360.524498] env[69992]: DEBUG nova.network.neutron [req-d9c86ca3-c169-40f0-bc01-4da5ea3bcb5c req-8c0fb07a-eaa9-48c5-a8c8-6c0797ee4a5d service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Refreshing network info cache for port 3d571c52-27cf-411e-86f3-279b842e93ca {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1360.596287] env[69992]: DEBUG oslo_vmware.api [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897922, 'name': PowerOffVM_Task, 'duration_secs': 0.242817} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.599824] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1360.600013] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1360.600658] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-78346ba8-d0aa-4dad-afd4-d65412d0ac4b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.609794] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897920, 'name': Rename_Task, 'duration_secs': 1.168213} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.610124] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1360.610388] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-953f4638-ef92-4108-aaec-fc9d1b1518bb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.618679] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1360.618679] env[69992]: value = "task-2897926" [ 1360.618679] env[69992]: _type = "Task" [ 1360.618679] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.628270] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897926, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.656739] env[69992]: DEBUG oslo_vmware.api [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897923, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.675021] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1360.676258] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1360.676485] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1360.676662] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Deleting the datastore file [datastore1] d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1360.676933] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ab8cb4a5-76c1-495a-9ef2-5ece37cada66 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.683834] env[69992]: DEBUG oslo_vmware.api [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1360.683834] env[69992]: value = "task-2897927" [ 1360.683834] env[69992]: _type = "Task" [ 1360.683834] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.693027] env[69992]: DEBUG oslo_vmware.api [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897927, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.852479] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8d247880-9729-4b12-900c-e7ab6198cc17 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 35.671s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1360.853599] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 10.280s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1360.853761] env[69992]: INFO nova.compute.manager [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Unshelving [ 1360.890977] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897924, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.957655] env[69992]: DEBUG oslo_vmware.api [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897921, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.129523] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897926, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.156641] env[69992]: DEBUG oslo_vmware.api [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897923, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.193211] env[69992]: DEBUG oslo_vmware.api [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897927, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.239658] env[69992]: DEBUG nova.network.neutron [req-d9c86ca3-c169-40f0-bc01-4da5ea3bcb5c req-8c0fb07a-eaa9-48c5-a8c8-6c0797ee4a5d service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Updated VIF entry in instance network info cache for port 3d571c52-27cf-411e-86f3-279b842e93ca. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1361.240181] env[69992]: DEBUG nova.network.neutron [req-d9c86ca3-c169-40f0-bc01-4da5ea3bcb5c req-8c0fb07a-eaa9-48c5-a8c8-6c0797ee4a5d service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Updating instance_info_cache with network_info: [{"id": "1b550e88-755a-45a1-98fd-6fcb8fa4a7a8", "address": "fa:16:3e:14:a9:b3", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b550e88-75", "ovs_interfaceid": "1b550e88-755a-45a1-98fd-6fcb8fa4a7a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3d571c52-27cf-411e-86f3-279b842e93ca", "address": "fa:16:3e:5b:0e:7b", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d571c52-27", "ovs_interfaceid": "3d571c52-27cf-411e-86f3-279b842e93ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.391082] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897924, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512104} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.391384] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] b72eb094-b0fa-4e6f-bc29-c110692c7204/b72eb094-b0fa-4e6f-bc29-c110692c7204.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1361.391770] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1361.391989] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f822f384-75cc-4a83-97b0-a583f0dd2a56 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.401530] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for the task: (returnval){ [ 1361.401530] env[69992]: value = "task-2897928" [ 1361.401530] env[69992]: _type = "Task" [ 1361.401530] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.412851] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897928, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.456880] env[69992]: DEBUG oslo_vmware.api [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897921, 'name': RemoveSnapshot_Task, 'duration_secs': 1.701664} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.457184] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Deleted Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1361.465780] env[69992]: DEBUG oslo_concurrency.lockutils [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1361.466068] env[69992]: DEBUG oslo_concurrency.lockutils [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1361.466286] env[69992]: DEBUG oslo_concurrency.lockutils [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1361.466468] env[69992]: DEBUG oslo_concurrency.lockutils [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1361.466638] env[69992]: DEBUG oslo_concurrency.lockutils [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1361.468768] env[69992]: INFO nova.compute.manager [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Terminating instance [ 1361.589237] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa249c2-aea9-480c-a34d-c843ca380bdf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.596770] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8e477a-c2d3-469c-9979-bc4459dbd2d4 {{(pid=69992) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.630483] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e788f38-b23f-41ca-8e43-cf4d7fa3b811 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.638070] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897926, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.641134] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1c03ac-ce5f-47f7-aad5-5dd4edfc08ff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.652164] env[69992]: DEBUG oslo_vmware.api [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897923, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.659803] env[69992]: DEBUG nova.compute.provider_tree [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1361.693734] env[69992]: DEBUG oslo_vmware.api [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897927, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.575415} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.693986] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1361.694200] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1361.694382] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1361.694555] env[69992]: INFO nova.compute.manager [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Took 1.63 seconds to destroy the instance on the hypervisor. 
[ 1361.694795] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1361.694987] env[69992]: DEBUG nova.compute.manager [-] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1361.695095] env[69992]: DEBUG nova.network.neutron [-] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1361.743920] env[69992]: DEBUG oslo_concurrency.lockutils [req-d9c86ca3-c169-40f0-bc01-4da5ea3bcb5c req-8c0fb07a-eaa9-48c5-a8c8-6c0797ee4a5d service nova] Releasing lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1361.882637] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1361.911450] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897928, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070242} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.911796] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1361.912795] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe1e914-a22d-41c0-a6d0-2c0720d0e713 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.933625] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] b72eb094-b0fa-4e6f-bc29-c110692c7204/b72eb094-b0fa-4e6f-bc29-c110692c7204.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1361.934426] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2be8712d-0e71-4ad5-81f6-20d6766d87c0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.956825] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for the task: (returnval){ [ 1361.956825] env[69992]: value = "task-2897929" [ 1361.956825] env[69992]: _type = "Task" [ 1361.956825] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.963426] env[69992]: WARNING nova.compute.manager [None req-526d1824-b71a-498e-9f68-9cbfed67c8d9 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Image not found during snapshot: nova.exception.ImageNotFound: Image eb34cd4b-f844-48db-8e4f-b669483d8f75 could not be found. [ 1361.967518] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897929, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.972565] env[69992]: DEBUG nova.compute.manager [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1361.972810] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1361.973614] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315919e1-3066-40a6-8c4e-1ef5e3a8a78f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.982349] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1361.982610] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7770ea90-4efd-44f4-9551-501cd0df4315 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.992371] env[69992]: DEBUG oslo_vmware.api [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1361.992371] env[69992]: value = "task-2897930" [ 1361.992371] env[69992]: _type = "Task" [ 1361.992371] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.002367] env[69992]: DEBUG oslo_vmware.api [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897930, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.035246] env[69992]: DEBUG nova.compute.manager [req-82ec9ec0-6831-487c-a38d-5c817c60f485 req-be25dc16-0a82-4094-9a6b-62779a180da2 service nova] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Received event network-vif-deleted-9fcd8287-e476-4c07-87e5-ec3dcbad7449 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1362.035416] env[69992]: INFO nova.compute.manager [req-82ec9ec0-6831-487c-a38d-5c817c60f485 req-be25dc16-0a82-4094-9a6b-62779a180da2 service nova] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Neutron deleted interface 9fcd8287-e476-4c07-87e5-ec3dcbad7449; detaching it from the instance and deleting it from the info cache [ 1362.035593] env[69992]: DEBUG nova.network.neutron [req-82ec9ec0-6831-487c-a38d-5c817c60f485 req-be25dc16-0a82-4094-9a6b-62779a180da2 service nova] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1362.137456] env[69992]: DEBUG oslo_vmware.api [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2897926, 'name': PowerOnVM_Task, 'duration_secs': 1.175742} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.137456] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1362.137456] env[69992]: INFO nova.compute.manager [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Took 10.10 seconds to spawn the instance on the hypervisor. [ 1362.137808] env[69992]: DEBUG nova.compute.manager [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1362.138697] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce76130-6d80-4a96-8b1d-a1d20bb6bb2d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.155567] env[69992]: DEBUG oslo_vmware.api [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897923, 'name': ReconfigVM_Task, 'duration_secs': 1.917181} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.156121] env[69992]: DEBUG oslo_concurrency.lockutils [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1362.156385] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Reconfigured VM to attach interface {{(pid=69992) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1362.164059] env[69992]: DEBUG nova.scheduler.client.report [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1362.452570] env[69992]: DEBUG nova.network.neutron [-] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
1362.466740] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897929, 'name': ReconfigVM_Task, 'duration_secs': 0.283471} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.467078] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Reconfigured VM instance instance-00000068 to attach disk [datastore1] b72eb094-b0fa-4e6f-bc29-c110692c7204/b72eb094-b0fa-4e6f-bc29-c110692c7204.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1362.467688] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5681aaf3-7d20-483c-905a-18f8ac52fa0e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.474739] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for the task: (returnval){ [ 1362.474739] env[69992]: value = "task-2897931" [ 1362.474739] env[69992]: _type = "Task" [ 1362.474739] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.483084] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897931, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.498881] env[69992]: DEBUG oslo_vmware.api [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897930, 'name': PowerOffVM_Task, 'duration_secs': 0.238996} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.499607] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1362.499830] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1362.500166] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4cfa1859-6879-43c4-b5d4-92b3d4640e64 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.538564] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dcd45872-384e-4514-9c6c-1dcf3a9296bb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.550956] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e7b78b-f670-47e9-82f9-47e4d7f040de {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.592030] env[69992]: DEBUG nova.compute.manager [req-82ec9ec0-6831-487c-a38d-5c817c60f485 req-be25dc16-0a82-4094-9a6b-62779a180da2 service nova] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Detach interface failed, port_id=9fcd8287-e476-4c07-87e5-ec3dcbad7449, reason: Instance d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4 could not be found. 
{{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1362.593204] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1362.593408] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1362.593587] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Deleting the datastore file [datastore1] 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1362.593836] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b2ca5d06-5424-40b9-a019-44290af71816 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.597505] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "3f44442d-82b1-4669-8d65-0088d4a9babb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1362.597730] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "3f44442d-82b1-4669-8d65-0088d4a9babb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1362.597926] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "3f44442d-82b1-4669-8d65-0088d4a9babb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1362.598124] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "3f44442d-82b1-4669-8d65-0088d4a9babb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1362.598291] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a 
tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "3f44442d-82b1-4669-8d65-0088d4a9babb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1362.600904] env[69992]: DEBUG oslo_vmware.api [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for the task: (returnval){ [ 1362.600904] env[69992]: value = "task-2897933" [ 1362.600904] env[69992]: _type = "Task" [ 1362.600904] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.601342] env[69992]: INFO nova.compute.manager [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Terminating instance [ 1362.611647] env[69992]: DEBUG oslo_vmware.api [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897933, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.658873] env[69992]: INFO nova.compute.manager [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Took 28.51 seconds to build instance. [ 1362.661207] env[69992]: DEBUG oslo_concurrency.lockutils [None req-34e474c3-ee57-499e-93da-b2b3ef277bb2 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "interface-6ccc70f5-4857-4af3-99a1-f60ec35aebaf-3d571c52-27cf-411e-86f3-279b842e93ca" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.414s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1362.668394] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.330s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1362.668868] env[69992]: DEBUG nova.compute.manager [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1362.671154] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 8.294s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1362.671335] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1362.671509] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69992) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1362.672131] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 5.896s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1362.674087] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9304ae77-fcf7-4c79-9c41-baf0625e20f8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.682844] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad8d7dcd-23ff-4652-a639-5223f0b2b132 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.698531] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8842efa-ed29-456f-89ea-aded071d11ff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.705374] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d63b693-05c4-449c-90e4-39465898d64a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.734529] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179383MB free_disk=161GB free_vcpus=48 pci_devices=None {{(pid=69992) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1362.734693] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1362.954928] env[69992]: INFO nova.compute.manager [-] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Took 1.26 seconds to deallocate network for instance. 
[ 1362.996042] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897931, 'name': Rename_Task, 'duration_secs': 0.173743} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.996421] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1362.996577] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-14f88e84-0377-4a62-bb12-91d8e65d6c11 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.008110] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for the task: (returnval){ [ 1363.008110] env[69992]: value = "task-2897934" [ 1363.008110] env[69992]: _type = "Task" [ 1363.008110] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.016134] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897934, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.107690] env[69992]: DEBUG nova.compute.manager [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1363.107909] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1363.108691] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d745986-dd72-4d50-a8a6-bf00a72c15c6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.116684] env[69992]: DEBUG oslo_vmware.api [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897933, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.118753] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1363.118982] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-204d0c5d-8016-4711-afc5-332a6a18f4b6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.124472] env[69992]: DEBUG oslo_vmware.api [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1363.124472] env[69992]: value = "task-2897935" [ 1363.124472] env[69992]: _type = "Task" [ 1363.124472] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.132392] env[69992]: DEBUG oslo_vmware.api [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897935, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.162598] env[69992]: DEBUG oslo_concurrency.lockutils [None req-12b711ff-45f7-48a5-9a4f-071d7f821505 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "5c8b5f76-918a-44ac-b5b4-5f5f252da936" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.021s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1363.174970] env[69992]: DEBUG nova.compute.utils [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1363.176398] env[69992]: DEBUG nova.compute.manager [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1363.176625] env[69992]: DEBUG nova.network.neutron [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1363.179094] env[69992]: DEBUG nova.objects.instance [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lazy-loading 'migration_context' on Instance uuid 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1363.233213] env[69992]: DEBUG nova.policy [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '57d2ee1abedf4874bcb44b4076199da6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b8716c4b7324052a3472734c655655a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1363.462591] env[69992]: DEBUG oslo_concurrency.lockutils [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1363.521684] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897934, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.531028] env[69992]: DEBUG nova.network.neutron [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Successfully created port: bb5d8567-e3df-4e01-9ce4-3bd611ab4d40 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1363.613477] env[69992]: DEBUG oslo_vmware.api [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Task: {'id': task-2897933, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.584407} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.613769] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1363.614029] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1363.614212] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1363.614410] env[69992]: INFO nova.compute.manager [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1363.614669] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1363.614875] env[69992]: DEBUG nova.compute.manager [-] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1363.615015] env[69992]: DEBUG nova.network.neutron [-] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1363.625647] env[69992]: DEBUG nova.compute.manager [req-ae0da774-89f1-4f14-a89e-90436ef60985 req-9657a642-2898-4fe5-8cc0-dd0a713cbe8c service nova] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Received event network-changed-c41aefad-ecba-4fa1-ae2e-2586734ffa8a {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1363.625840] env[69992]: DEBUG nova.compute.manager [req-ae0da774-89f1-4f14-a89e-90436ef60985 req-9657a642-2898-4fe5-8cc0-dd0a713cbe8c service nova] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Refreshing instance network info cache due to event network-changed-c41aefad-ecba-4fa1-ae2e-2586734ffa8a. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1363.627125] env[69992]: DEBUG oslo_concurrency.lockutils [req-ae0da774-89f1-4f14-a89e-90436ef60985 req-9657a642-2898-4fe5-8cc0-dd0a713cbe8c service nova] Acquiring lock "refresh_cache-5c8b5f76-918a-44ac-b5b4-5f5f252da936" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1363.627379] env[69992]: DEBUG oslo_concurrency.lockutils [req-ae0da774-89f1-4f14-a89e-90436ef60985 req-9657a642-2898-4fe5-8cc0-dd0a713cbe8c service nova] Acquired lock "refresh_cache-5c8b5f76-918a-44ac-b5b4-5f5f252da936" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1363.627598] env[69992]: DEBUG nova.network.neutron [req-ae0da774-89f1-4f14-a89e-90436ef60985 req-9657a642-2898-4fe5-8cc0-dd0a713cbe8c service nova] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Refreshing network info cache for port c41aefad-ecba-4fa1-ae2e-2586734ffa8a {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1363.641693] env[69992]: DEBUG oslo_vmware.api [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897935, 'name': PowerOffVM_Task, 'duration_secs': 0.252383} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.642232] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1363.642232] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1363.642423] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f4232c1a-3243-460e-ad4b-4fb349e060d8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.679977] env[69992]: DEBUG nova.compute.manager [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1363.711259] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1363.711259] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1363.711259] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Deleting the datastore file [datastore2] 3f44442d-82b1-4669-8d65-0088d4a9babb {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1363.711259] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58863f34-c367-4872-8ae9-016ee2fe2e43 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.717597] env[69992]: DEBUG oslo_vmware.api [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1363.717597] env[69992]: value = "task-2897937" [ 1363.717597] env[69992]: _type = "Task" [ 1363.717597] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.736693] env[69992]: DEBUG oslo_vmware.api [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897937, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.967412] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ec5759-230f-4bf8-8db1-42c120a9d037 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.981332] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-240358a7-246a-4813-bf88-dbad78a32d46 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.019092] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2de49e-4281-4285-b819-bbcacf3a9b32 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.030975] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1393218a-2cb7-4b0d-b839-40e3b53b01bc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.035147] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897934, 'name': PowerOnVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.045250] env[69992]: DEBUG nova.compute.provider_tree [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1364.234540] env[69992]: DEBUG oslo_vmware.api [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2897937, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193617} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.234834] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1364.235068] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1364.235265] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1364.235467] env[69992]: INFO nova.compute.manager [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1364.235729] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1364.235935] env[69992]: DEBUG nova.compute.manager [-] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1364.236049] env[69992]: DEBUG nova.network.neutron [-] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1364.249095] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "interface-6ccc70f5-4857-4af3-99a1-f60ec35aebaf-3d571c52-27cf-411e-86f3-279b842e93ca" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1364.249971] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "interface-6ccc70f5-4857-4af3-99a1-f60ec35aebaf-3d571c52-27cf-411e-86f3-279b842e93ca" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1364.527428] env[69992]: DEBUG oslo_vmware.api [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897934, 'name': PowerOnVM_Task, 'duration_secs': 1.106559} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.527681] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1364.527881] env[69992]: INFO nova.compute.manager [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Took 6.37 seconds to spawn the instance on the hypervisor. 
[ 1364.528354] env[69992]: DEBUG nova.compute.manager [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1364.529136] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c61a19d-5e98-40c1-89e2-644ade716646 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.548565] env[69992]: DEBUG nova.scheduler.client.report [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1364.585898] env[69992]: DEBUG nova.network.neutron [-] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1364.592552] env[69992]: DEBUG nova.network.neutron [req-ae0da774-89f1-4f14-a89e-90436ef60985 req-9657a642-2898-4fe5-8cc0-dd0a713cbe8c service nova] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Updated VIF entry in instance network info cache for port c41aefad-ecba-4fa1-ae2e-2586734ffa8a. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1364.592999] env[69992]: DEBUG nova.network.neutron [req-ae0da774-89f1-4f14-a89e-90436ef60985 req-9657a642-2898-4fe5-8cc0-dd0a713cbe8c service nova] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Updating instance_info_cache with network_info: [{"id": "c41aefad-ecba-4fa1-ae2e-2586734ffa8a", "address": "fa:16:3e:ab:9f:f9", "network": {"id": "7b76ab15-e15a-4e22-ba38-bffeba7c4ff6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1461551189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4da04b8933ad4d2ba4b1c193853f31b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc41aefad-ec", "ovs_interfaceid": "c41aefad-ecba-4fa1-ae2e-2586734ffa8a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1364.604460] env[69992]: DEBUG nova.compute.manager [req-c1963f5c-42d1-4744-a0bd-2521f3db6994 req-a392c663-c711-444e-aac9-954e09b4049d service nova] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Received event network-vif-deleted-1af32739-5591-409e-8d79-66f78e068c14 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1364.604641] env[69992]: INFO nova.compute.manager [req-c1963f5c-42d1-4744-a0bd-2521f3db6994 req-a392c663-c711-444e-aac9-954e09b4049d service nova] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Neutron deleted interface 1af32739-5591-409e-8d79-66f78e068c14; detaching it from the instance and deleting it from the info cache [ 1364.604822] env[69992]: DEBUG nova.network.neutron [req-c1963f5c-42d1-4744-a0bd-2521f3db6994 req-a392c663-c711-444e-aac9-954e09b4049d service nova] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1364.693109] env[69992]: DEBUG nova.compute.manager [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1364.721330] env[69992]: DEBUG nova.virt.hardware [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1364.721467] env[69992]: DEBUG nova.virt.hardware [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1364.721606] env[69992]: DEBUG nova.virt.hardware [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1364.721837] env[69992]: DEBUG nova.virt.hardware [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1364.722010] env[69992]: DEBUG nova.virt.hardware [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1364.722168] env[69992]: DEBUG nova.virt.hardware [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1364.722383] env[69992]: DEBUG nova.virt.hardware [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1364.722541] env[69992]: DEBUG nova.virt.hardware [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1364.722709] env[69992]: DEBUG nova.virt.hardware [None 
req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1364.722872] env[69992]: DEBUG nova.virt.hardware [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1364.723057] env[69992]: DEBUG nova.virt.hardware [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1364.723931] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2796677a-480a-4fc5-ab9a-8c55f3b1844c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.732319] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b227ed8a-0144-443c-a116-63d25bd693d4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.755115] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.755115] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1364.755115] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2e1bba-68ba-4fc2-b387-ef5f712362ab {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.772074] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c86684e-4ced-4cda-b265-a2470b6a0cae {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.799514] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Reconfiguring VM to detach interface {{(pid=69992) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1364.799514] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c16104a-3e88-4b54-ba58-bfb9819d5a70 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.817290] env[69992]: DEBUG oslo_vmware.api [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 
tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1364.817290] env[69992]: value = "task-2897938" [ 1364.817290] env[69992]: _type = "Task" [ 1364.817290] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.827256] env[69992]: DEBUG oslo_vmware.api [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897938, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.003307] env[69992]: DEBUG nova.network.neutron [-] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1365.045028] env[69992]: INFO nova.compute.manager [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Took 24.21 seconds to build instance. [ 1365.070325] env[69992]: DEBUG nova.network.neutron [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Successfully updated port: bb5d8567-e3df-4e01-9ce4-3bd611ab4d40 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1365.088430] env[69992]: INFO nova.compute.manager [-] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Took 1.47 seconds to deallocate network for instance. [ 1365.095065] env[69992]: DEBUG oslo_concurrency.lockutils [req-ae0da774-89f1-4f14-a89e-90436ef60985 req-9657a642-2898-4fe5-8cc0-dd0a713cbe8c service nova] Releasing lock "refresh_cache-5c8b5f76-918a-44ac-b5b4-5f5f252da936" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1365.107506] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1ebebe02-4a35-415f-aa9a-e5655e5cb819 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.117060] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1b690f-4ce2-4e8d-93db-deef74da8a84 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.149899] env[69992]: DEBUG nova.compute.manager [req-c1963f5c-42d1-4744-a0bd-2521f3db6994 req-a392c663-c711-444e-aac9-954e09b4049d service nova] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Detach interface failed, port_id=1af32739-5591-409e-8d79-66f78e068c14, reason: Instance 3f44442d-82b1-4669-8d65-0088d4a9babb could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1365.327396] env[69992]: DEBUG oslo_vmware.api [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897938, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.445664] env[69992]: INFO nova.compute.manager [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Rebuilding instance [ 1365.486967] env[69992]: DEBUG nova.compute.manager [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1365.487163] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be11a9fd-83f4-4812-8ea2-2be6898f7f1c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.505126] env[69992]: INFO nova.compute.manager [-] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Took 1.27 seconds to deallocate network for instance. [ 1365.547649] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cfc67c24-c851-4354-b240-99185a0a9fec tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Lock "b72eb094-b0fa-4e6f-bc29-c110692c7204" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.723s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1365.560282] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.888s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1365.565843] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.891s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1365.566063] env[69992]: DEBUG nova.objects.instance [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lazy-loading 'resources' on Instance uuid 57702674-4c96-4577-a93f-24ecffebb3a7 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1365.572832] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "refresh_cache-bf45e20c-0fd7-4a27-924c-0ae56c6cff82" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1365.572965] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired lock "refresh_cache-bf45e20c-0fd7-4a27-924c-0ae56c6cff82" {{(pid=69992) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1365.573128] env[69992]: DEBUG nova.network.neutron [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1365.594770] env[69992]: DEBUG oslo_concurrency.lockutils [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1365.665097] env[69992]: DEBUG nova.compute.manager [req-da35d79e-7714-4913-b460-a0eaf11c5319 req-0819ca24-a2fa-47e6-93f1-3676b1b28891 service nova] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Received event network-vif-deleted-5b1da692-8443-4514-a168-fc8c34ae5b4a {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1365.828555] env[69992]: DEBUG oslo_vmware.api [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897938, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.010493] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1366.106274] env[69992]: DEBUG nova.network.neutron [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1366.258480] env[69992]: DEBUG nova.network.neutron [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Updating instance_info_cache with network_info: [{"id": "bb5d8567-e3df-4e01-9ce4-3bd611ab4d40", "address": "fa:16:3e:b1:7f:e0", "network": {"id": "ef950ccf-7246-4fba-b1d2-5829e51d7f7d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093076994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b8716c4b7324052a3472734c655655a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb5d8567-e3", "ovs_interfaceid": "bb5d8567-e3df-4e01-9ce4-3bd611ab4d40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1366.301446] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d26a369-464e-40db-934c-ac607e2366af {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.308626] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c92883-078e-442f-9e48-049b4a9b898d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.342101] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb06adc0-422a-4c24-bfd5-6be1f293e284 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.352703] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ebdca8-eccb-49fe-871a-e8fc36f9d79e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.356486] env[69992]: DEBUG oslo_vmware.api [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897938, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.367922] env[69992]: DEBUG nova.compute.provider_tree [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1366.501258] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1366.501570] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79b1cc59-011d-46ba-8653-393e6f6d8f72 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.508963] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for the task: (returnval){ [ 1366.508963] env[69992]: value = "task-2897939" [ 1366.508963] env[69992]: _type = "Task" [ 1366.508963] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.518174] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897939, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.629896] env[69992]: DEBUG nova.compute.manager [req-571b32ec-6754-4205-ad87-fd663d133882 req-9cd37ddd-23a0-43c6-8169-7bcea25ae7c7 service nova] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Received event network-vif-plugged-bb5d8567-e3df-4e01-9ce4-3bd611ab4d40 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1366.630062] env[69992]: DEBUG oslo_concurrency.lockutils [req-571b32ec-6754-4205-ad87-fd663d133882 req-9cd37ddd-23a0-43c6-8169-7bcea25ae7c7 service nova] Acquiring lock "bf45e20c-0fd7-4a27-924c-0ae56c6cff82-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1366.630317] env[69992]: DEBUG oslo_concurrency.lockutils [req-571b32ec-6754-4205-ad87-fd663d133882 req-9cd37ddd-23a0-43c6-8169-7bcea25ae7c7 service nova] Lock "bf45e20c-0fd7-4a27-924c-0ae56c6cff82-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1366.630470] env[69992]: DEBUG oslo_concurrency.lockutils [req-571b32ec-6754-4205-ad87-fd663d133882 req-9cd37ddd-23a0-43c6-8169-7bcea25ae7c7 service nova] Lock "bf45e20c-0fd7-4a27-924c-0ae56c6cff82-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1366.630657] env[69992]: DEBUG nova.compute.manager [req-571b32ec-6754-4205-ad87-fd663d133882 req-9cd37ddd-23a0-43c6-8169-7bcea25ae7c7 service nova] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] No waiting events found dispatching network-vif-plugged-bb5d8567-e3df-4e01-9ce4-3bd611ab4d40 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1366.630800] env[69992]: WARNING nova.compute.manager [req-571b32ec-6754-4205-ad87-fd663d133882 req-9cd37ddd-23a0-43c6-8169-7bcea25ae7c7 service nova] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Received unexpected event network-vif-plugged-bb5d8567-e3df-4e01-9ce4-3bd611ab4d40 for instance with vm_state building and task_state spawning. [ 1366.630967] env[69992]: DEBUG nova.compute.manager [req-571b32ec-6754-4205-ad87-fd663d133882 req-9cd37ddd-23a0-43c6-8169-7bcea25ae7c7 service nova] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Received event network-changed-bb5d8567-e3df-4e01-9ce4-3bd611ab4d40 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1366.631550] env[69992]: DEBUG nova.compute.manager [req-571b32ec-6754-4205-ad87-fd663d133882 req-9cd37ddd-23a0-43c6-8169-7bcea25ae7c7 service nova] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Refreshing instance network info cache due to event network-changed-bb5d8567-e3df-4e01-9ce4-3bd611ab4d40. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1366.631780] env[69992]: DEBUG oslo_concurrency.lockutils [req-571b32ec-6754-4205-ad87-fd663d133882 req-9cd37ddd-23a0-43c6-8169-7bcea25ae7c7 service nova] Acquiring lock "refresh_cache-bf45e20c-0fd7-4a27-924c-0ae56c6cff82" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1366.761966] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Releasing lock "refresh_cache-bf45e20c-0fd7-4a27-924c-0ae56c6cff82" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1366.761966] env[69992]: DEBUG nova.compute.manager [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Instance network_info: |[{"id": "bb5d8567-e3df-4e01-9ce4-3bd611ab4d40", "address": "fa:16:3e:b1:7f:e0", "network": {"id": "ef950ccf-7246-4fba-b1d2-5829e51d7f7d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093076994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b8716c4b7324052a3472734c655655a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb5d8567-e3", "ovs_interfaceid": "bb5d8567-e3df-4e01-9ce4-3bd611ab4d40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1366.762311] env[69992]: DEBUG oslo_concurrency.lockutils [req-571b32ec-6754-4205-ad87-fd663d133882 req-9cd37ddd-23a0-43c6-8169-7bcea25ae7c7 service nova] Acquired lock "refresh_cache-bf45e20c-0fd7-4a27-924c-0ae56c6cff82" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1366.762497] env[69992]: DEBUG nova.network.neutron [req-571b32ec-6754-4205-ad87-fd663d133882 req-9cd37ddd-23a0-43c6-8169-7bcea25ae7c7 service nova] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Refreshing network info cache for port bb5d8567-e3df-4e01-9ce4-3bd611ab4d40 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1366.763803] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:7f:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ed4797-90ad-44cd-bbcb-e90b2a8400f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'bb5d8567-e3df-4e01-9ce4-3bd611ab4d40', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1366.771459] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1366.774623] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1366.775086] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2392bf38-bc2f-44d9-bf23-d9b12a9be6fe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.795577] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1366.795577] env[69992]: value = "task-2897940" [ 1366.795577] env[69992]: _type = "Task" [ 1366.795577] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.804083] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897940, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.848205] env[69992]: DEBUG oslo_vmware.api [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897938, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.871517] env[69992]: DEBUG nova.scheduler.client.report [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1367.021667] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897939, 'name': PowerOffVM_Task, 'duration_secs': 0.186627} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.021988] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1367.022259] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1367.023455] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ecf588f-aa4b-432f-a26f-4f289d136cb5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.030614] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1367.030853] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e351ae7d-1890-438a-ad97-111e70e2f6a7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.036232] env[69992]: DEBUG nova.network.neutron [req-571b32ec-6754-4205-ad87-fd663d133882 req-9cd37ddd-23a0-43c6-8169-7bcea25ae7c7 service nova] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Updated VIF entry in instance network info cache for port bb5d8567-e3df-4e01-9ce4-3bd611ab4d40. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1367.036572] env[69992]: DEBUG nova.network.neutron [req-571b32ec-6754-4205-ad87-fd663d133882 req-9cd37ddd-23a0-43c6-8169-7bcea25ae7c7 service nova] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Updating instance_info_cache with network_info: [{"id": "bb5d8567-e3df-4e01-9ce4-3bd611ab4d40", "address": "fa:16:3e:b1:7f:e0", "network": {"id": "ef950ccf-7246-4fba-b1d2-5829e51d7f7d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093076994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b8716c4b7324052a3472734c655655a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb5d8567-e3", "ovs_interfaceid": "bb5d8567-e3df-4e01-9ce4-3bd611ab4d40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1367.060752] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1367.060977] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1367.061177] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Deleting the datastore file [datastore1] b72eb094-b0fa-4e6f-bc29-c110692c7204 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1367.061430] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-65807712-1adb-422f-98a3-3ff97834aaa5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.068518] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for the task: (returnval){ [ 1367.068518] env[69992]: value = "task-2897942" [ 1367.068518] env[69992]: _type = "Task" [ 1367.068518] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.076450] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897942, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.101385] env[69992]: INFO nova.compute.manager [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Swapping old allocation on dict_keys(['9dc5dd7f-a3af-48a9-a04e-f6c1d333da28']) held by migration fdae9135-789e-4e36-84ab-893429246875 for instance [ 1367.125790] env[69992]: DEBUG nova.scheduler.client.report [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Overwriting current allocation {'allocations': {'9dc5dd7f-a3af-48a9-a04e-f6c1d333da28': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 152}}, 'project_id': 'ca458056b0794b08b812f0a4106a448c', 'user_id': '9e7acd70754b4b5d966bcc0662b9a2e8', 'consumer_generation': 1} on consumer 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe {{(pid=69992) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1367.201887] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "refresh_cache-1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.202116] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired lock "refresh_cache-1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1367.202300] env[69992]: DEBUG nova.network.neutron [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1367.305429] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897940, 'name': CreateVM_Task, 'duration_secs': 0.315665} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.306262] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1367.306445] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.306445] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1367.306749] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1367.307013] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acfffa12-5f25-4250-a82a-5d3855583c86 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.311330] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1367.311330] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]523745ac-9dc4-2aa7-616f-b33de9ca4087" [ 1367.311330] env[69992]: _type = "Task" [ 1367.311330] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.320672] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523745ac-9dc4-2aa7-616f-b33de9ca4087, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.348597] env[69992]: DEBUG oslo_vmware.api [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897938, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.377835] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.812s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1367.380182] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.498s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1367.380420] env[69992]: DEBUG nova.objects.instance [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lazy-loading 'pci_requests' on Instance uuid 08869f38-9609-4f7f-9110-2f26fd1cb3f7 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1367.397742] env[69992]: INFO nova.scheduler.client.report [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Deleted allocations for instance 57702674-4c96-4577-a93f-24ecffebb3a7 [ 1367.540238] env[69992]: DEBUG oslo_concurrency.lockutils [req-571b32ec-6754-4205-ad87-fd663d133882 req-9cd37ddd-23a0-43c6-8169-7bcea25ae7c7 service nova] Releasing lock "refresh_cache-bf45e20c-0fd7-4a27-924c-0ae56c6cff82" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1367.578369] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897942, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090041} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.579135] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1367.579135] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1367.579135] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1367.822221] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523745ac-9dc4-2aa7-616f-b33de9ca4087, 'name': SearchDatastore_Task, 'duration_secs': 0.009112} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.822521] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1367.823284] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1367.823284] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.823284] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1367.823445] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1367.823590] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-896d1fce-0050-456d-b2b3-91fbc5bd4997 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.831912] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1367.832102] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1367.832804] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9348334c-5fc7-45a9-bed9-9e4a39803497 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.839746] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1367.839746] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52b7c490-d8c8-51fb-ae0d-2e7381296c85" [ 1367.839746] env[69992]: _type = "Task" [ 1367.839746] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.850807] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b7c490-d8c8-51fb-ae0d-2e7381296c85, 'name': SearchDatastore_Task, 'duration_secs': 0.008372} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.854279] env[69992]: DEBUG oslo_vmware.api [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897938, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.854496] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2547f1fb-9d34-493c-9dac-637958ba0368 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.859109] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1367.859109] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a85453-5cd9-da6c-4c57-e5ede3610313" [ 1367.859109] env[69992]: _type = "Task" [ 1367.859109] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.869635] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a85453-5cd9-da6c-4c57-e5ede3610313, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.883736] env[69992]: DEBUG nova.objects.instance [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lazy-loading 'numa_topology' on Instance uuid 08869f38-9609-4f7f-9110-2f26fd1cb3f7 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1367.904244] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0a3d4dd-c26c-43cd-b6e2-02b248fe9764 tempest-VolumesAdminNegativeTest-1577344158 tempest-VolumesAdminNegativeTest-1577344158-project-member] Lock "57702674-4c96-4577-a93f-24ecffebb3a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.369s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1367.928925] env[69992]: DEBUG nova.network.neutron [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Updating instance_info_cache with network_info: [{"id": "2c7ae122-41e5-4605-a33e-4516dd1f5945", "address": "fa:16:3e:41:be:e4", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c7ae122-41", "ovs_interfaceid": "2c7ae122-41e5-4605-a33e-4516dd1f5945", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1368.349926] env[69992]: DEBUG oslo_vmware.api [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897938, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.368568] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a85453-5cd9-da6c-4c57-e5ede3610313, 'name': SearchDatastore_Task, 'duration_secs': 0.008727} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.368821] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1368.369098] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] bf45e20c-0fd7-4a27-924c-0ae56c6cff82/bf45e20c-0fd7-4a27-924c-0ae56c6cff82.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1368.369361] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2364170b-22a1-4921-a805-1dbccfd339bf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.375641] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1368.375641] env[69992]: value = "task-2897943" [ 1368.375641] env[69992]: _type = "Task" [ 1368.375641] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.384261] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897943, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.385719] env[69992]: INFO nova.compute.claims [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1368.433019] env[69992]: DEBUG oslo_concurrency.lockutils [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Releasing lock "refresh_cache-1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1368.433019] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b82cae6e-6a61-419e-8aa9-278e4a43e203 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.440545] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce68949-c92d-4cc2-9438-8b9f526c6714 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.618147] env[69992]: DEBUG nova.virt.hardware [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1368.618147] env[69992]: DEBUG nova.virt.hardware [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1368.618147] env[69992]: DEBUG nova.virt.hardware [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1368.618147] env[69992]: DEBUG nova.virt.hardware [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1368.618147] env[69992]: DEBUG nova.virt.hardware [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Image pref 0:0:0 {{(pid=69992) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1368.618147] env[69992]: DEBUG nova.virt.hardware [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1368.618147] env[69992]: DEBUG nova.virt.hardware [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1368.618583] env[69992]: DEBUG nova.virt.hardware [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1368.618883] env[69992]: DEBUG nova.virt.hardware [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1368.619174] env[69992]: DEBUG nova.virt.hardware [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1368.619514] env[69992]: DEBUG nova.virt.hardware [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1368.620546] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4586613-0901-43f1-bbdc-17ff42983f45 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.629724] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a4cc55-8d5c-414e-9cd4-21d3d30a3580 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.644157] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Instance VIF info [] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1368.650166] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1368.650692] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1368.651050] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-44146586-3ded-400a-be1f-0da6707f6aa9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.670618] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1368.670618] env[69992]: value = "task-2897944" [ 1368.670618] env[69992]: _type = "Task" [ 1368.670618] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.680426] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897944, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.853883] env[69992]: DEBUG oslo_vmware.api [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897938, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.887385] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897943, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495941} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.887385] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] bf45e20c-0fd7-4a27-924c-0ae56c6cff82/bf45e20c-0fd7-4a27-924c-0ae56c6cff82.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1368.887385] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1368.887385] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e4c03bfa-8b4c-424e-b4b5-fe4bd90cbb1b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.896675] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1368.896675] env[69992]: value = "task-2897945" [ 1368.896675] env[69992]: _type = "Task" [ 1368.896675] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.902971] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897945, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.183470] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897944, 'name': CreateVM_Task, 'duration_secs': 0.315746} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.183470] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1369.183902] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.184026] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1369.184342] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1369.184588] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11076eed-d3ea-406b-a83b-204b726dc338 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.189254] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for the task: (returnval){ [ 1369.189254] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52c26aea-2fa8-cd28-ee94-d4fe16b87ae2" [ 1369.189254] env[69992]: _type = "Task" [ 1369.189254] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.196902] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c26aea-2fa8-cd28-ee94-d4fe16b87ae2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.354447] env[69992]: DEBUG oslo_vmware.api [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897938, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.410954] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897945, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.225395} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.410954] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1369.410954] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c102a337-d8f6-40c4-899b-30c47dd9d701 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.433428] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] bf45e20c-0fd7-4a27-924c-0ae56c6cff82/bf45e20c-0fd7-4a27-924c-0ae56c6cff82.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1369.435695] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cce4cb6b-b80e-49dd-bb56-4cc44efe1328 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.461637] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1369.461637] env[69992]: value = "task-2897946" [ 1369.461637] env[69992]: _type = "Task" [ 1369.461637] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.476294] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897946, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.532029] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1369.532029] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b21cd2f1-fd15-429f-a017-2b7be62ef146 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.539733] env[69992]: DEBUG oslo_vmware.api [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1369.539733] env[69992]: value = "task-2897947" [ 1369.539733] env[69992]: _type = "Task" [ 1369.539733] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.548322] env[69992]: DEBUG oslo_vmware.api [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897947, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.654736] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c53b47e-c437-4578-957d-54e53bcf99fc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.663712] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12cb68fb-4adc-4be1-88d3-c7ad3a601af2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.702248] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b663519e-228b-4d4b-bf82-b66c6b52b137 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.712023] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c26aea-2fa8-cd28-ee94-d4fe16b87ae2, 'name': SearchDatastore_Task, 'duration_secs': 0.046008} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.712287] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1369.712524] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1369.712770] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.712944] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1369.713149] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1369.713440] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa1119a2-46ea-4840-9f1d-1c8762d28537 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.716050] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-710a2473-21b0-4dd9-9e8c-d4f8de0eda50 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.731328] env[69992]: DEBUG nova.compute.provider_tree [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1369.734281] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1369.736346] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 
tempest-ServerShowV257Test-1648629764-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1369.738176] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cc3db52-beae-46b2-b970-c5e78a36e09e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.746057] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for the task: (returnval){ [ 1369.746057] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52124215-bc52-06b3-3a58-0f53918fba3f" [ 1369.746057] env[69992]: _type = "Task" [ 1369.746057] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.755540] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52124215-bc52-06b3-3a58-0f53918fba3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.852997] env[69992]: DEBUG oslo_vmware.api [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897938, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.972180] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897946, 'name': ReconfigVM_Task, 'duration_secs': 0.315141} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.972465] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Reconfigured VM instance instance-00000069 to attach disk [datastore2] bf45e20c-0fd7-4a27-924c-0ae56c6cff82/bf45e20c-0fd7-4a27-924c-0ae56c6cff82.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1369.973139] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-07966f6f-5d11-48a1-a2a9-15bf9b57b918 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.980185] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1369.980185] env[69992]: value = "task-2897948" [ 1369.980185] env[69992]: _type = "Task" [ 1369.980185] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.988154] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897948, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.049117] env[69992]: DEBUG oslo_vmware.api [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897947, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.238701] env[69992]: DEBUG nova.scheduler.client.report [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1370.257780] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52124215-bc52-06b3-3a58-0f53918fba3f, 'name': SearchDatastore_Task, 'duration_secs': 0.012876} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.258990] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-482fc649-8682-4138-8973-de43ddd7b2cb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.268853] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for the task: (returnval){ [ 1370.268853] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52fbdf66-b630-8811-e38d-75dc244e725a" [ 1370.268853] env[69992]: _type = "Task" [ 1370.268853] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.277042] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52fbdf66-b630-8811-e38d-75dc244e725a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.354069] env[69992]: DEBUG oslo_vmware.api [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897938, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.490951] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897948, 'name': Rename_Task, 'duration_secs': 0.150824} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.491313] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1370.491526] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d5b16ecc-4251-49fc-9636-37d575bb7c8b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.497788] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1370.497788] env[69992]: value = "task-2897949" [ 1370.497788] env[69992]: _type = "Task" [ 1370.497788] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.505244] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897949, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.549949] env[69992]: DEBUG oslo_vmware.api [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897947, 'name': PowerOffVM_Task, 'duration_secs': 0.800353} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.550515] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1370.551233] env[69992]: DEBUG nova.virt.hardware [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1370.551519] env[69992]: DEBUG nova.virt.hardware [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1370.551604] env[69992]: DEBUG nova.virt.hardware [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1370.551825] env[69992]: DEBUG nova.virt.hardware [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1370.551981] env[69992]: DEBUG nova.virt.hardware [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1370.552146] env[69992]: DEBUG nova.virt.hardware [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1370.552378] env[69992]: DEBUG nova.virt.hardware [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1370.552568] env[69992]: DEBUG nova.virt.hardware [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 
tempest-ServerActionsTestOtherB-873622132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1370.552742] env[69992]: DEBUG nova.virt.hardware [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1370.552928] env[69992]: DEBUG nova.virt.hardware [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1370.553132] env[69992]: DEBUG nova.virt.hardware [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1370.558168] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-deb6a940-528c-4b89-a489-3d241ae36e10 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.577108] env[69992]: DEBUG oslo_vmware.api [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1370.577108] env[69992]: value = "task-2897950" [ 1370.577108] env[69992]: _type = "Task" [ 1370.577108] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.582587] env[69992]: DEBUG oslo_vmware.api [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897950, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.665583] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5a7bc638-ea12-43ad-b6d6-43af76350c42 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "9464339a-b760-47e9-bc75-e88ce18bf71b" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1370.665866] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5a7bc638-ea12-43ad-b6d6-43af76350c42 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "9464339a-b760-47e9-bc75-e88ce18bf71b" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1370.723856] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1370.724032] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1370.724234] env[69992]: INFO nova.compute.manager [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Shelving [ 1370.743670] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.363s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1370.746167] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 8.011s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1370.777617] env[69992]: INFO nova.network.neutron [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Updating port bb64cf0b-3b8e-4225-ba71-1524625e60a7 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1370.783207] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': 
session[528eb7b7-6862-86e5-2686-6146916c3c70]52fbdf66-b630-8811-e38d-75dc244e725a, 'name': SearchDatastore_Task, 'duration_secs': 0.017086} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.783454] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1370.783709] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] b72eb094-b0fa-4e6f-bc29-c110692c7204/b72eb094-b0fa-4e6f-bc29-c110692c7204.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1370.783962] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b71590d6-f874-45da-b8e1-1d8f536ea743 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.790352] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for the task: (returnval){ [ 1370.790352] env[69992]: value = "task-2897951" [ 1370.790352] env[69992]: _type = "Task" [ 1370.790352] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.798284] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897951, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.853927] env[69992]: DEBUG oslo_vmware.api [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2897938, 'name': ReconfigVM_Task, 'duration_secs': 5.752385} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.854301] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1370.854565] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Reconfigured VM to detach interface {{(pid=69992) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1371.010084] env[69992]: DEBUG oslo_vmware.api [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897949, 'name': PowerOnVM_Task, 'duration_secs': 0.463914} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.010367] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1371.010574] env[69992]: INFO nova.compute.manager [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Took 6.32 seconds to spawn the instance on the hypervisor. [ 1371.010755] env[69992]: DEBUG nova.compute.manager [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1371.011602] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ea2a6b-eae0-40d7-bb05-cedf268749d6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.084391] env[69992]: DEBUG oslo_vmware.api [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897950, 'name': ReconfigVM_Task, 'duration_secs': 0.188023} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.085210] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dfaa0f8-0bf5-44a0-915c-fb162b878a5a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.106796] env[69992]: DEBUG nova.virt.hardware [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1371.107069] env[69992]: DEBUG nova.virt.hardware [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1371.107240] env[69992]: DEBUG nova.virt.hardware [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1371.107426] env[69992]: DEBUG nova.virt.hardware [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1371.107574] env[69992]: DEBUG nova.virt.hardware [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1371.107722] env[69992]: DEBUG nova.virt.hardware [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1371.107931] env[69992]: DEBUG nova.virt.hardware [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1371.108121] env[69992]: DEBUG nova.virt.hardware [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1371.108297] env[69992]: DEBUG nova.virt.hardware [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1371.108465] env[69992]: DEBUG nova.virt.hardware [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1371.108645] env[69992]: DEBUG nova.virt.hardware [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1371.109529] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd1e5531-4e73-440f-bfc1-3e2b237b54b2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.115100] env[69992]: DEBUG oslo_vmware.api [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1371.115100] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]525dbc93-2a61-b424-f9a1-4160c635a551" [ 1371.115100] env[69992]: _type = "Task" [ 1371.115100] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.123682] env[69992]: DEBUG oslo_vmware.api [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525dbc93-2a61-b424-f9a1-4160c635a551, 'name': SearchDatastore_Task, 'duration_secs': 0.006222} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.129373] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Reconfiguring VM instance instance-00000055 to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1371.130323] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fea5ef0e-3e3d-4ce6-ab40-f56089c037d3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.147451] env[69992]: DEBUG oslo_vmware.api [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1371.147451] env[69992]: value = "task-2897952" [ 1371.147451] env[69992]: _type = "Task" [ 1371.147451] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.155124] env[69992]: DEBUG oslo_vmware.api [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897952, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.169481] env[69992]: DEBUG nova.compute.utils [None req-5a7bc638-ea12-43ad-b6d6-43af76350c42 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1371.300778] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897951, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.532826] env[69992]: INFO nova.compute.manager [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Took 23.50 seconds to build instance. [ 1371.660141] env[69992]: DEBUG oslo_vmware.api [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897952, 'name': ReconfigVM_Task, 'duration_secs': 0.417714} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.660141] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Reconfigured VM instance instance-00000055 to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1371.660899] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c58734-da9b-4405-8182-ed0afcb74dfb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.681517] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5a7bc638-ea12-43ad-b6d6-43af76350c42 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "9464339a-b760-47e9-bc75-e88ce18bf71b" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.016s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1371.692015] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe/1f9f3bdf-c806-4ac9-85f3-6b33b983fafe.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1371.692015] env[69992]: DEBUG oslo_vmware.service [-] 
Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c365ae95-f29e-4523-8cf4-43da0086cb54 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.710504] env[69992]: DEBUG oslo_vmware.api [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1371.710504] env[69992]: value = "task-2897953" [ 1371.710504] env[69992]: _type = "Task" [ 1371.710504] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.719383] env[69992]: DEBUG oslo_vmware.api [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897953, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.741483] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1371.741483] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-202ca8af-e69e-4c5f-924c-d6ec2caaaf3a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.747180] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1371.747180] env[69992]: value = "task-2897954" [ 1371.747180] env[69992]: _type = "Task" [ 1371.747180] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.756827] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897954, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.780747] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance fcbe1142-72dc-4a02-af9b-e03a2031a247 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.780971] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.781135] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance e95e47c2-d82e-4153-8d16-7b65d992e91a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.781274] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.781419] env[69992]: WARNING nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1371.781544] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 6ccc70f5-4857-4af3-99a1-f60ec35aebaf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.781663] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 9464339a-b760-47e9-bc75-e88ce18bf71b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.781797] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.781938] env[69992]: WARNING nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 3f44442d-82b1-4669-8d65-0088d4a9babb is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1371.782074] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance fe3624b0-7d4a-4a16-83e3-3f28c2a74006 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.782221] env[69992]: WARNING nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1371.782342] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 5c8b5f76-918a-44ac-b5b4-5f5f252da936 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.782457] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance b72eb094-b0fa-4e6f-bc29-c110692c7204 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.782615] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance bf45e20c-0fd7-4a27-924c-0ae56c6cff82 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.782694] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 08869f38-9609-4f7f-9110-2f26fd1cb3f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.782903] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1371.783171] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2816MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1371.806481] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897951, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.70186} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.809379] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] b72eb094-b0fa-4e6f-bc29-c110692c7204/b72eb094-b0fa-4e6f-bc29-c110692c7204.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1371.809627] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1371.810261] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fb88c92e-96e4-4cf2-acb2-3ed08f2f8fba {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.817648] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for the task: (returnval){ [ 1371.817648] env[69992]: value = "task-2897955" [ 1371.817648] env[69992]: _type = "Task" [ 1371.817648] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.832108] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897955, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.995774] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696ffb1c-3d6b-4d7c-905c-c50287dbddab {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.005692] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a63b8a-e04b-455b-aa3f-6e193de2603a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.038328] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74110835-5c53-4635-9a15-b7daf4983eba tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "bf45e20c-0fd7-4a27-924c-0ae56c6cff82" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.012s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1372.039402] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eada804-c3b7-493b-ad2e-0755024a0f30 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.047490] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567b21e6-7ef0-40fb-ba41-25e041293b06 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.061712] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1372.200324] env[69992]: DEBUG nova.compute.manager [req-e0d2db69-59ef-48fe-98be-c462d1a9d92b req-009d6049-7b2d-4205-ade6-d59fd65cb596 service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Received event network-vif-plugged-bb64cf0b-3b8e-4225-ba71-1524625e60a7 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1372.201592] env[69992]: DEBUG oslo_concurrency.lockutils [req-e0d2db69-59ef-48fe-98be-c462d1a9d92b req-009d6049-7b2d-4205-ade6-d59fd65cb596 service nova] Acquiring lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1372.201908] env[69992]: DEBUG oslo_concurrency.lockutils [req-e0d2db69-59ef-48fe-98be-c462d1a9d92b req-009d6049-7b2d-4205-ade6-d59fd65cb596 service nova] Lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1372.202256] env[69992]: DEBUG oslo_concurrency.lockutils [req-e0d2db69-59ef-48fe-98be-c462d1a9d92b req-009d6049-7b2d-4205-ade6-d59fd65cb596 service nova] Lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
1372.202492] env[69992]: DEBUG nova.compute.manager [req-e0d2db69-59ef-48fe-98be-c462d1a9d92b req-009d6049-7b2d-4205-ade6-d59fd65cb596 service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] No waiting events found dispatching network-vif-plugged-bb64cf0b-3b8e-4225-ba71-1524625e60a7 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1372.202719] env[69992]: WARNING nova.compute.manager [req-e0d2db69-59ef-48fe-98be-c462d1a9d92b req-009d6049-7b2d-4205-ade6-d59fd65cb596 service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Received unexpected event network-vif-plugged-bb64cf0b-3b8e-4225-ba71-1524625e60a7 for instance with vm_state shelved_offloaded and task_state spawning. [ 1372.222209] env[69992]: DEBUG oslo_vmware.api [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897953, 'name': ReconfigVM_Task, 'duration_secs': 0.308091} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.222521] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Reconfigured VM instance instance-00000055 to attach disk [datastore2] 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe/1f9f3bdf-c806-4ac9-85f3-6b33b983fafe.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1372.223955] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ed41f4-18d6-4069-8388-4a5098a428f0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.256094] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf18ad4-a75d-4f98-a2fe-fd2032ec4187 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.264465] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897954, 'name': PowerOffVM_Task, 'duration_secs': 0.305113} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.280179] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1372.282084] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad9d398e-0e38-4020-abdb-d8bbabccdbb4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.285090] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f870688-5987-4f8d-991b-4c5557e4a241 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.326175] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "refresh_cache-08869f38-9609-4f7f-9110-2f26fd1cb3f7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1372.326399] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquired lock "refresh_cache-08869f38-9609-4f7f-9110-2f26fd1cb3f7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1372.326566] env[69992]: DEBUG nova.network.neutron [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1372.328973] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e780ac0-a606-45db-9ae2-844872567331 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.335361] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33281f8-2f21-4e2b-9047-a78319586ce9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.349820] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1372.349949] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897955, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.135755} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.350142] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-abad0c88-70ad-46c8-afff-666f47ccce4c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.355019] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1372.355019] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd2e095-8feb-45d9-8fa5-b9d0b465245e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.360180] env[69992]: DEBUG oslo_vmware.api [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1372.360180] env[69992]: value = "task-2897956" [ 1372.360180] env[69992]: _type = "Task" [ 1372.360180] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.379388] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] b72eb094-b0fa-4e6f-bc29-c110692c7204/b72eb094-b0fa-4e6f-bc29-c110692c7204.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1372.381983] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b43b147e-c213-4c46-b722-9e75a86a568c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.404304] env[69992]: DEBUG oslo_vmware.api [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897956, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.405529] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for the task: (returnval){ [ 1372.405529] env[69992]: value = "task-2897957" [ 1372.405529] env[69992]: _type = "Task" [ 1372.405529] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.416104] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897957, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.501278] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1372.501494] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1372.501647] env[69992]: DEBUG nova.network.neutron [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1372.564605] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1372.760115] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5a7bc638-ea12-43ad-b6d6-43af76350c42 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "9464339a-b760-47e9-bc75-e88ce18bf71b" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1372.760115] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5a7bc638-ea12-43ad-b6d6-43af76350c42 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "9464339a-b760-47e9-bc75-e88ce18bf71b" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1372.761040] env[69992]: INFO nova.compute.manager [None req-5a7bc638-ea12-43ad-b6d6-43af76350c42 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Attaching volume 38a23a44-927a-49f0-af50-0d71be5adb30 to /dev/sdb [ 1372.803212] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2372ff5a-5405-437d-a70e-894bdccabeef {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.814853] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-064d05ee-c5f3-48db-a4e9-05cbc56c6450 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.824924] env[69992]: DEBUG nova.virt.block_device [None req-5a7bc638-ea12-43ad-b6d6-43af76350c42 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Updating existing volume attachment record: 343e3124-a5f0-4feb-87b6-54950edc4954 {{(pid=69992) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1372.846143] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Creating Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1372.846494] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-60dd2cf0-9acf-4341-a7a0-ce087bde4425 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.853842] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1372.853842] env[69992]: value = "task-2897958" [ 1372.853842] env[69992]: _type = "Task" [ 1372.853842] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.862709] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897958, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.879723] env[69992]: DEBUG oslo_vmware.api [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897956, 'name': PowerOnVM_Task, 'duration_secs': 0.430491} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.880048] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1372.917869] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897957, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.960546] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-177d5f50-932c-4119-b5b1-26b2ff2b2fcb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.968842] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3861ddbe-2ed9-48b2-b0aa-d6fdd91c8a07 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Suspending the VM {{(pid=69992) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1372.968842] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-8d480c46-8922-455d-bf44-72e9962d09df {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.974356] env[69992]: DEBUG oslo_vmware.api [None req-3861ddbe-2ed9-48b2-b0aa-d6fdd91c8a07 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1372.974356] env[69992]: value = "task-2897960" [ 1372.974356] env[69992]: _type = "Task" [ 1372.974356] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.983448] env[69992]: DEBUG oslo_vmware.api [None req-3861ddbe-2ed9-48b2-b0aa-d6fdd91c8a07 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897960, 'name': SuspendVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.070183] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69992) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1373.070433] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.324s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1373.070712] env[69992]: DEBUG oslo_concurrency.lockutils [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.608s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1373.070903] env[69992]: DEBUG oslo_concurrency.lockutils [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1373.073244] env[69992]: DEBUG oslo_concurrency.lockutils [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.478s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1373.073512] env[69992]: DEBUG oslo_concurrency.lockutils [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1373.076197] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.066s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1373.076456] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1373.080781] env[69992]: DEBUG nova.network.neutron [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Updating instance_info_cache with network_info: [{"id": "bb64cf0b-3b8e-4225-ba71-1524625e60a7", "address": "fa:16:3e:e0:df:48", "network": {"id": "50514bed-dff8-45a5-83f3-ec0c1ece9611", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1240365716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f3a2959667e41f1b5868994454b21be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb64cf0b-3b", "ovs_interfaceid": "bb64cf0b-3b8e-4225-ba71-1524625e60a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1373.113935] env[69992]: INFO nova.scheduler.client.report [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Deleted allocations for instance 3f44442d-82b1-4669-8d65-0088d4a9babb [ 1373.116469] env[69992]: INFO nova.scheduler.client.report [None 
req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Deleted allocations for instance 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3 [ 1373.140383] env[69992]: INFO nova.scheduler.client.report [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Deleted allocations for instance d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4 [ 1373.364382] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897958, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.418691] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897957, 'name': ReconfigVM_Task, 'duration_secs': 0.699632} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.419247] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Reconfigured VM instance instance-00000068 to attach disk [datastore1] b72eb094-b0fa-4e6f-bc29-c110692c7204/b72eb094-b0fa-4e6f-bc29-c110692c7204.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1373.420814] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-47822828-7996-4c17-964b-bf98421d7bf2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.429462] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for the task: (returnval){ [ 1373.429462] env[69992]: value = "task-2897963" [ 1373.429462] env[69992]: _type = "Task" [ 1373.429462] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.439786] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897963, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.485905] env[69992]: DEBUG oslo_vmware.api [None req-3861ddbe-2ed9-48b2-b0aa-d6fdd91c8a07 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897960, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.586963] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Releasing lock "refresh_cache-08869f38-9609-4f7f-9110-2f26fd1cb3f7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1373.620031] env[69992]: DEBUG nova.virt.hardware [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c3fda01e94ede77fc79d17627e74fdfb',container_format='bare',created_at=2025-03-10T17:53:53Z,direct_url=,disk_format='vmdk',id=2afccf79-1e06-45e0-bd6d-e1bf4c00e288,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1340095225-shelved',owner='3f3a2959667e41f1b5868994454b21be',properties=ImageMetaProps,protected=,size=31670272,status='active',tags=,updated_at=2025-03-10T17:54:10Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1373.620031] env[69992]: DEBUG nova.virt.hardware [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1373.620031] env[69992]: DEBUG nova.virt.hardware [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1373.620031] env[69992]: DEBUG nova.virt.hardware [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1373.620031] env[69992]: DEBUG nova.virt.hardware [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1373.621060] env[69992]: DEBUG nova.virt.hardware [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1373.622176] env[69992]: DEBUG nova.virt.hardware [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1373.622519] env[69992]: DEBUG nova.virt.hardware [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1373.622932] env[69992]: DEBUG nova.virt.hardware [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1373.623348] env[69992]: DEBUG nova.virt.hardware [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1373.623723] env[69992]: DEBUG nova.virt.hardware [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1373.625055] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6517ccf3-6b4a-4151-bda7-defdb223c4ce {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.629886] env[69992]: INFO nova.network.neutron [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Port 3d571c52-27cf-411e-86f3-279b842e93ca from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1373.630801] env[69992]: DEBUG nova.network.neutron [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Updating instance_info_cache with network_info: [{"id": "1b550e88-755a-45a1-98fd-6fcb8fa4a7a8", "address": "fa:16:3e:14:a9:b3", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b550e88-75", "ovs_interfaceid": "1b550e88-755a-45a1-98fd-6fcb8fa4a7a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1373.640135] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3827e5ae-8618-4799-88a3-21c475bd5c1a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "3f44442d-82b1-4669-8d65-0088d4a9babb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.041s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1373.643090] env[69992]: DEBUG oslo_concurrency.lockutils [None req-26f10d89-aefc-47e9-ad66-737561ec7ab8 tempest-ServerDiskConfigTestJSON-649865869 tempest-ServerDiskConfigTestJSON-649865869-project-member] Lock "91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.177s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1373.646490] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f13c382-151a-4e4f-af3b-9ee9036673ae {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.657733] env[69992]: DEBUG oslo_concurrency.lockutils [None req-54809039-bc26-4551-bc1a-308c71e63ea7 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.097s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1373.676648] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:df:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0954fad3-d24d-496c-83e6-a09d3cb556fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bb64cf0b-3b8e-4225-ba71-1524625e60a7', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1373.686024] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1373.686800] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1373.687238] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-caac1967-1a85-48ec-8d96-09fb7a4cea57 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.708147] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1373.708147] env[69992]: value = "task-2897964" [ 1373.708147] env[69992]: _type = "Task" [ 1373.708147] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.716536] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897964, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.871032] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897958, 'name': CreateSnapshot_Task, 'duration_secs': 0.879646} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.871032] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Created Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1373.871032] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfab2358-841a-4a0f-9e54-d47de6f168f3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.886548] env[69992]: DEBUG nova.compute.manager [req-45a957af-3b8c-461c-880a-7191db41950a req-2dc7b3ab-f9fc-4c2d-8774-5d33b32bb293 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Received event network-changed-1b550e88-755a-45a1-98fd-6fcb8fa4a7a8 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1373.886548] env[69992]: DEBUG nova.compute.manager [req-45a957af-3b8c-461c-880a-7191db41950a req-2dc7b3ab-f9fc-4c2d-8774-5d33b32bb293 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Refreshing instance network info cache due to event network-changed-1b550e88-755a-45a1-98fd-6fcb8fa4a7a8. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1373.886548] env[69992]: DEBUG oslo_concurrency.lockutils [req-45a957af-3b8c-461c-880a-7191db41950a req-2dc7b3ab-f9fc-4c2d-8774-5d33b32bb293 service nova] Acquiring lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1373.932269] env[69992]: INFO nova.compute.manager [None req-cc9451b8-3c74-41ca-938f-21193f430497 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Updating instance to original state: 'active' [ 1373.945895] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897963, 'name': Rename_Task, 'duration_secs': 0.144527} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.946068] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1373.948064] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d708c61-fa48-487b-89b0-408b47df2c06 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.953622] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for the task: (returnval){ [ 1373.953622] env[69992]: value = "task-2897965" [ 1373.953622] env[69992]: _type = "Task" [ 1373.953622] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.965064] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897965, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.986834] env[69992]: DEBUG oslo_vmware.api [None req-3861ddbe-2ed9-48b2-b0aa-d6fdd91c8a07 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897960, 'name': SuspendVM_Task, 'duration_secs': 0.631262} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.987209] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3861ddbe-2ed9-48b2-b0aa-d6fdd91c8a07 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Suspended the VM {{(pid=69992) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1373.987506] env[69992]: DEBUG nova.compute.manager [None req-3861ddbe-2ed9-48b2-b0aa-d6fdd91c8a07 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1373.988456] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e637b9d-af0c-45d4-9167-460ac54828cc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.138556] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1374.141602] env[69992]: DEBUG oslo_concurrency.lockutils [req-45a957af-3b8c-461c-880a-7191db41950a req-2dc7b3ab-f9fc-4c2d-8774-5d33b32bb293 service nova] Acquired lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1374.141872] env[69992]: DEBUG nova.network.neutron [req-45a957af-3b8c-461c-880a-7191db41950a req-2dc7b3ab-f9fc-4c2d-8774-5d33b32bb293 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Refreshing network info cache for port 1b550e88-755a-45a1-98fd-6fcb8fa4a7a8 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1374.221318] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897964, 'name': CreateVM_Task, 'duration_secs': 0.360487} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.221556] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1374.222292] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2afccf79-1e06-45e0-bd6d-e1bf4c00e288" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.222570] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2afccf79-1e06-45e0-bd6d-e1bf4c00e288" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1374.222853] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2afccf79-1e06-45e0-bd6d-e1bf4c00e288" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1374.223476] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bbe8668-2e4f-41e6-a808-e301dc9e3c0b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.228622] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1374.228622] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52861f6e-1194-7a37-1b35-d6d0d89e29c1" [ 1374.228622] env[69992]: _type = "Task" [ 1374.228622] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.238590] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52861f6e-1194-7a37-1b35-d6d0d89e29c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.250480] env[69992]: DEBUG nova.compute.manager [req-d501180f-9e20-470a-ab6d-3c28df218051 req-4df017b9-39b2-48d1-8e5c-9668f5a6a29a service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Received event network-changed-bb64cf0b-3b8e-4225-ba71-1524625e60a7 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1374.250730] env[69992]: DEBUG nova.compute.manager [req-d501180f-9e20-470a-ab6d-3c28df218051 req-4df017b9-39b2-48d1-8e5c-9668f5a6a29a service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Refreshing instance network info cache due to event network-changed-bb64cf0b-3b8e-4225-ba71-1524625e60a7. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1374.251028] env[69992]: DEBUG oslo_concurrency.lockutils [req-d501180f-9e20-470a-ab6d-3c28df218051 req-4df017b9-39b2-48d1-8e5c-9668f5a6a29a service nova] Acquiring lock "refresh_cache-08869f38-9609-4f7f-9110-2f26fd1cb3f7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.251210] env[69992]: DEBUG oslo_concurrency.lockutils [req-d501180f-9e20-470a-ab6d-3c28df218051 req-4df017b9-39b2-48d1-8e5c-9668f5a6a29a service nova] Acquired lock "refresh_cache-08869f38-9609-4f7f-9110-2f26fd1cb3f7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1374.252083] env[69992]: DEBUG nova.network.neutron [req-d501180f-9e20-470a-ab6d-3c28df218051 req-4df017b9-39b2-48d1-8e5c-9668f5a6a29a service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Refreshing network info cache for port bb64cf0b-3b8e-4225-ba71-1524625e60a7 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1374.392803] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Creating linked-clone VM from snapshot {{(pid=69992) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1374.394110] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-79caee50-3218-463d-a568-a512a34d78f3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.405607] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1374.405607] env[69992]: value = "task-2897966" [ 1374.405607] env[69992]: _type = "Task" [ 1374.405607] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.421258] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897966, 'name': CloneVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.470773] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897965, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.644576] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ccfae937-fb13-4a2e-b464-7612b2b7c218 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "interface-6ccc70f5-4857-4af3-99a1-f60ec35aebaf-3d571c52-27cf-411e-86f3-279b842e93ca" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.395s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1374.740907] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2afccf79-1e06-45e0-bd6d-e1bf4c00e288" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1374.741195] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Processing image 2afccf79-1e06-45e0-bd6d-e1bf4c00e288 {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1374.741456] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2afccf79-1e06-45e0-bd6d-e1bf4c00e288/2afccf79-1e06-45e0-bd6d-e1bf4c00e288.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.741602] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2afccf79-1e06-45e0-bd6d-e1bf4c00e288/2afccf79-1e06-45e0-bd6d-e1bf4c00e288.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1374.741781] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1374.742155] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b15f6850-4c36-45cd-a298-4eda20e615d6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.757535] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1374.757719] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1374.758923] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f79755d-96a4-426e-9db2-a93260a6e0bf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.765698] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1374.765698] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]522b07a2-478c-419a-d490-8c2dc4d17e8c" [ 1374.765698] env[69992]: _type = "Task" [ 1374.765698] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.776383] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]522b07a2-478c-419a-d490-8c2dc4d17e8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.926303] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897966, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.970734] env[69992]: DEBUG oslo_vmware.api [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897965, 'name': PowerOnVM_Task, 'duration_secs': 0.529206} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.971068] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1374.971890] env[69992]: DEBUG nova.compute.manager [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1374.979047] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c90bdf96-86c7-49ad-8c05-b9053ac0beb3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.084980] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "921f1e1a-6de3-404d-8970-8545db0128f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1375.085287] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "921f1e1a-6de3-404d-8970-8545db0128f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1375.211294] env[69992]: DEBUG nova.network.neutron [req-d501180f-9e20-470a-ab6d-3c28df218051 req-4df017b9-39b2-48d1-8e5c-9668f5a6a29a service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Updated VIF entry in instance network info cache for port bb64cf0b-3b8e-4225-ba71-1524625e60a7. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1375.211784] env[69992]: DEBUG nova.network.neutron [req-d501180f-9e20-470a-ab6d-3c28df218051 req-4df017b9-39b2-48d1-8e5c-9668f5a6a29a service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Updating instance_info_cache with network_info: [{"id": "bb64cf0b-3b8e-4225-ba71-1524625e60a7", "address": "fa:16:3e:e0:df:48", "network": {"id": "50514bed-dff8-45a5-83f3-ec0c1ece9611", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1240365716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f3a2959667e41f1b5868994454b21be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb64cf0b-3b", "ovs_interfaceid": "bb64cf0b-3b8e-4225-ba71-1524625e60a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1375.277753] env[69992]: DEBUG nova.network.neutron [req-45a957af-3b8c-461c-880a-7191db41950a req-2dc7b3ab-f9fc-4c2d-8774-5d33b32bb293 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Updated VIF entry in instance network info cache for port 1b550e88-755a-45a1-98fd-6fcb8fa4a7a8. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1375.277753] env[69992]: DEBUG nova.network.neutron [req-45a957af-3b8c-461c-880a-7191db41950a req-2dc7b3ab-f9fc-4c2d-8774-5d33b32bb293 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Updating instance_info_cache with network_info: [{"id": "1b550e88-755a-45a1-98fd-6fcb8fa4a7a8", "address": "fa:16:3e:14:a9:b3", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b550e88-75", "ovs_interfaceid": "1b550e88-755a-45a1-98fd-6fcb8fa4a7a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1375.284795] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Preparing fetch location {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1375.285105] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Fetch image to [datastore1] OSTACK_IMG_d44fe310-c02a-4f0a-a9f6-0f7af48d3991/OSTACK_IMG_d44fe310-c02a-4f0a-a9f6-0f7af48d3991.vmdk {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1375.285653] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Downloading stream optimized image 2afccf79-1e06-45e0-bd6d-e1bf4c00e288 to [datastore1] OSTACK_IMG_d44fe310-c02a-4f0a-a9f6-0f7af48d3991/OSTACK_IMG_d44fe310-c02a-4f0a-a9f6-0f7af48d3991.vmdk on the data store datastore1 as vApp {{(pid=69992) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1375.285653] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Downloading image file data 2afccf79-1e06-45e0-bd6d-e1bf4c00e288 to the ESX as VM named 'OSTACK_IMG_d44fe310-c02a-4f0a-a9f6-0f7af48d3991' {{(pid=69992) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1375.420472] 
env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897966, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.425102] env[69992]: DEBUG oslo_vmware.rw_handles [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1375.425102] env[69992]: value = "resgroup-9" [ 1375.425102] env[69992]: _type = "ResourcePool" [ 1375.425102] env[69992]: }. {{(pid=69992) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1375.426321] env[69992]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-480b9e92-5802-46f1-a5c6-c672cfd414cb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.449187] env[69992]: DEBUG oslo_vmware.rw_handles [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lease: (returnval){ [ 1375.449187] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52abca15-b970-bf4c-7269-e5bf499b2219" [ 1375.449187] env[69992]: _type = "HttpNfcLease" [ 1375.449187] env[69992]: } obtained for vApp import into resource pool (val){ [ 1375.449187] env[69992]: value = "resgroup-9" [ 1375.449187] env[69992]: _type = "ResourcePool" [ 1375.449187] env[69992]: }. {{(pid=69992) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1375.449587] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the lease: (returnval){ [ 1375.449587] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52abca15-b970-bf4c-7269-e5bf499b2219" [ 1375.449587] env[69992]: _type = "HttpNfcLease" [ 1375.449587] env[69992]: } to be ready. {{(pid=69992) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1375.464929] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1375.464929] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52abca15-b970-bf4c-7269-e5bf499b2219" [ 1375.464929] env[69992]: _type = "HttpNfcLease" [ 1375.464929] env[69992]: } is initializing. 
{{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1375.502655] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1375.502655] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1375.502655] env[69992]: DEBUG nova.objects.instance [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69992) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1375.589504] env[69992]: DEBUG nova.compute.manager [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1375.715637] env[69992]: DEBUG oslo_concurrency.lockutils [req-d501180f-9e20-470a-ab6d-3c28df218051 req-4df017b9-39b2-48d1-8e5c-9668f5a6a29a service nova] Releasing lock "refresh_cache-08869f38-9609-4f7f-9110-2f26fd1cb3f7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1375.780167] env[69992]: DEBUG oslo_concurrency.lockutils [req-45a957af-3b8c-461c-880a-7191db41950a req-2dc7b3ab-f9fc-4c2d-8774-5d33b32bb293 service nova] Releasing lock "refresh_cache-6ccc70f5-4857-4af3-99a1-f60ec35aebaf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1375.919355] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897966, 'name': CloneVM_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.959382] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1375.959382] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52abca15-b970-bf4c-7269-e5bf499b2219" [ 1375.959382] env[69992]: _type = "HttpNfcLease" [ 1375.959382] env[69992]: } is initializing. 
{{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1376.011805] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "bf45e20c-0fd7-4a27-924c-0ae56c6cff82" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1376.012030] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "bf45e20c-0fd7-4a27-924c-0ae56c6cff82" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1376.012168] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "bf45e20c-0fd7-4a27-924c-0ae56c6cff82-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1376.012366] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "bf45e20c-0fd7-4a27-924c-0ae56c6cff82-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1376.012540] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "bf45e20c-0fd7-4a27-924c-0ae56c6cff82-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1376.015648] env[69992]: INFO nova.compute.manager [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Terminating instance [ 1376.084779] env[69992]: DEBUG oslo_concurrency.lockutils [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Acquiring lock "b72eb094-b0fa-4e6f-bc29-c110692c7204" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1376.085222] env[69992]: DEBUG oslo_concurrency.lockutils [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Lock "b72eb094-b0fa-4e6f-bc29-c110692c7204" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1376.085554] env[69992]: DEBUG oslo_concurrency.lockutils [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Acquiring lock "b72eb094-b0fa-4e6f-bc29-c110692c7204-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1376.085855] env[69992]: DEBUG oslo_concurrency.lockutils [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Lock "b72eb094-b0fa-4e6f-bc29-c110692c7204-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1376.086154] env[69992]: DEBUG oslo_concurrency.lockutils [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Lock "b72eb094-b0fa-4e6f-bc29-c110692c7204-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1376.091051] env[69992]: INFO nova.compute.manager [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Terminating instance [ 1376.111387] env[69992]: DEBUG nova.compute.manager [req-94658b7b-653c-40ac-a1c8-b1be6263c4b1 req-01109b8f-20e8-402e-a301-fb345a23e432 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Received event network-changed-c35bf17a-173c-4013-b8e4-85b2415e8860 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1376.111691] env[69992]: DEBUG nova.compute.manager [req-94658b7b-653c-40ac-a1c8-b1be6263c4b1 req-01109b8f-20e8-402e-a301-fb345a23e432 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Refreshing instance network info cache due to event network-changed-c35bf17a-173c-4013-b8e4-85b2415e8860. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1376.112039] env[69992]: DEBUG oslo_concurrency.lockutils [req-94658b7b-653c-40ac-a1c8-b1be6263c4b1 req-01109b8f-20e8-402e-a301-fb345a23e432 service nova] Acquiring lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.112307] env[69992]: DEBUG oslo_concurrency.lockutils [req-94658b7b-653c-40ac-a1c8-b1be6263c4b1 req-01109b8f-20e8-402e-a301-fb345a23e432 service nova] Acquired lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1376.112582] env[69992]: DEBUG nova.network.neutron [req-94658b7b-653c-40ac-a1c8-b1be6263c4b1 req-01109b8f-20e8-402e-a301-fb345a23e432 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Refreshing network info cache for port c35bf17a-173c-4013-b8e4-85b2415e8860 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1376.122348] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1376.423393] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897966, 'name': CloneVM_Task, 'duration_secs': 1.586726} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.423674] env[69992]: INFO nova.virt.vmwareapi.vmops [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Created linked-clone VM from snapshot [ 1376.424552] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c15abc9-8064-4ecb-a36e-af2666433fce {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.435587] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Uploading image 697d1ada-cc80-456a-9a40-098dcf5fc096 {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1376.463765] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1376.463765] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52abca15-b970-bf4c-7269-e5bf499b2219" [ 1376.463765] env[69992]: _type = "HttpNfcLease" [ 1376.463765] env[69992]: } is ready. 
{{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1376.463765] env[69992]: DEBUG oslo_vmware.rw_handles [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1376.463765] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52abca15-b970-bf4c-7269-e5bf499b2219" [ 1376.463765] env[69992]: _type = "HttpNfcLease" [ 1376.463765] env[69992]: }. {{(pid=69992) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1376.463765] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a3561b-35db-4ae5-a261-cbaa2287cc4b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.473191] env[69992]: DEBUG oslo_vmware.rw_handles [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52561dee-fd9d-99b5-31d7-0e4b4d184c34/disk-0.vmdk from lease info. {{(pid=69992) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1376.473191] env[69992]: DEBUG oslo_vmware.rw_handles [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Creating HTTP connection to write to file with size = 31670272 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52561dee-fd9d-99b5-31d7-0e4b4d184c34/disk-0.vmdk. {{(pid=69992) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1376.538088] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6cd21d0d-f0b2-4743-946c-72c325a869c5 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.037s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1376.539528] env[69992]: DEBUG nova.compute.manager [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1376.539775] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1376.542069] env[69992]: DEBUG oslo_vmware.rw_handles [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1376.542069] env[69992]: value = "vm-582112" [ 1376.542069] env[69992]: _type = "VirtualMachine" [ 1376.542069] env[69992]: }. 
{{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1376.542926] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.420s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1376.544126] env[69992]: INFO nova.compute.claims [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1376.547054] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72866b7e-0a10-4b4d-88a5-0dc110db9976 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.549840] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-447a3a4c-9def-4a0c-828d-fa0349d4a9cc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.560042] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a8fd0e93-2873-4c5a-b28b-9d307f961af5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.562460] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1376.565525] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a9a90be-e544-41f3-8459-99a581b6ac01 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.566346] env[69992]: DEBUG oslo_vmware.rw_handles [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lease: (returnval){ [ 1376.566346] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52790a60-c400-7481-8117-41ba77102a7f" [ 1376.566346] env[69992]: _type = "HttpNfcLease" [ 1376.566346] env[69992]: } obtained for exporting VM: (result){ [ 1376.566346] env[69992]: value = "vm-582112" [ 1376.566346] env[69992]: _type = "VirtualMachine" [ 1376.566346] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1376.566784] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the lease: (returnval){ [ 1376.566784] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52790a60-c400-7481-8117-41ba77102a7f" [ 1376.566784] env[69992]: _type = "HttpNfcLease" [ 1376.566784] env[69992]: } to be ready. 
{{(pid=69992) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1376.577574] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1376.577574] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52790a60-c400-7481-8117-41ba77102a7f" [ 1376.577574] env[69992]: _type = "HttpNfcLease" [ 1376.577574] env[69992]: } is ready. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1376.577818] env[69992]: DEBUG oslo_vmware.rw_handles [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1376.577818] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52790a60-c400-7481-8117-41ba77102a7f" [ 1376.577818] env[69992]: _type = "HttpNfcLease" [ 1376.577818] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1376.580180] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cdb905b-b1d3-47fb-a916-0d9337eb718a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.586316] env[69992]: DEBUG oslo_vmware.rw_handles [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d2f005-3f05-4471-0e51-bab4be1d7cbd/disk-0.vmdk from lease info. {{(pid=69992) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1376.589266] env[69992]: DEBUG oslo_vmware.rw_handles [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d2f005-3f05-4471-0e51-bab4be1d7cbd/disk-0.vmdk for reading. 
{{(pid=69992) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1376.645402] env[69992]: DEBUG oslo_concurrency.lockutils [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Acquiring lock "refresh_cache-b72eb094-b0fa-4e6f-bc29-c110692c7204" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.645597] env[69992]: DEBUG oslo_concurrency.lockutils [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Acquired lock "refresh_cache-b72eb094-b0fa-4e6f-bc29-c110692c7204" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1376.645820] env[69992]: DEBUG nova.network.neutron [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1376.652369] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1376.652607] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1376.652821] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Deleting the datastore file [datastore2] bf45e20c-0fd7-4a27-924c-0ae56c6cff82 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1376.653686] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac731899-823f-410a-a7bf-bd3f95f2f278 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.660477] env[69992]: DEBUG oslo_vmware.api [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1376.660477] env[69992]: value = "task-2897971" [ 1376.660477] env[69992]: _type = "Task" [ 1376.660477] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.668520] env[69992]: DEBUG oslo_vmware.api [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897971, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.694666] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1376.694907] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1376.695162] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "1f9f3bdf-c806-4ac9-85f3-6b33b983fafe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1376.695675] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "1f9f3bdf-c806-4ac9-85f3-6b33b983fafe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1376.695675] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "1f9f3bdf-c806-4ac9-85f3-6b33b983fafe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1376.700229] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ac6a838a-03de-4c8a-a011-12b14eba1b2c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.701215] env[69992]: INFO nova.compute.manager [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Terminating instance [ 1376.918990] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "d5a6a189-0a7d-49ba-acab-35a244cf76eb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1376.919460] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35991062-4d9a-4e87-9083-75d6901ded4d 
tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "d5a6a189-0a7d-49ba-acab-35a244cf76eb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1377.180317] env[69992]: DEBUG oslo_vmware.api [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2897971, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.277018} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.181557] env[69992]: DEBUG nova.network.neutron [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1377.184327] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1377.184602] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1377.185041] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1377.185185] env[69992]: INFO nova.compute.manager [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Took 0.65 seconds to destroy the instance on the hypervisor. [ 1377.185519] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1377.190158] env[69992]: DEBUG nova.compute.manager [-] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1377.190344] env[69992]: DEBUG nova.network.neutron [-] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1377.205327] env[69992]: DEBUG nova.compute.manager [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1377.205599] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1377.206081] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-160a7c3d-2b51-43da-94b7-23a1cd2cecaf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.220248] env[69992]: DEBUG oslo_vmware.api [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1377.220248] env[69992]: value = "task-2897972" [ 1377.220248] env[69992]: _type = "Task" [ 1377.220248] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.237288] env[69992]: DEBUG oslo_vmware.api [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897972, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.271966] env[69992]: DEBUG nova.network.neutron [req-94658b7b-653c-40ac-a1c8-b1be6263c4b1 req-01109b8f-20e8-402e-a301-fb345a23e432 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Updated VIF entry in instance network info cache for port c35bf17a-173c-4013-b8e4-85b2415e8860. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1377.272602] env[69992]: DEBUG nova.network.neutron [req-94658b7b-653c-40ac-a1c8-b1be6263c4b1 req-01109b8f-20e8-402e-a301-fb345a23e432 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Updating instance_info_cache with network_info: [{"id": "c35bf17a-173c-4013-b8e4-85b2415e8860", "address": "fa:16:3e:e2:6a:23", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc35bf17a-17", "ovs_interfaceid": "c35bf17a-173c-4013-b8e4-85b2415e8860", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1377.314761] env[69992]: DEBUG nova.network.neutron [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1377.423751] env[69992]: DEBUG nova.compute.manager [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1377.713676] env[69992]: DEBUG nova.compute.manager [req-f11bca94-0c02-409d-91a7-e3ddd35367be req-a1546efa-c74a-467b-8ef4-b4f6d160ff3e service nova] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Received event network-vif-deleted-bb5d8567-e3df-4e01-9ce4-3bd611ab4d40 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1377.713912] env[69992]: INFO nova.compute.manager [req-f11bca94-0c02-409d-91a7-e3ddd35367be req-a1546efa-c74a-467b-8ef4-b4f6d160ff3e service nova] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Neutron deleted interface bb5d8567-e3df-4e01-9ce4-3bd611ab4d40; detaching it from the instance and deleting it from the info cache [ 1377.714106] env[69992]: DEBUG nova.network.neutron [req-f11bca94-0c02-409d-91a7-e3ddd35367be req-a1546efa-c74a-467b-8ef4-b4f6d160ff3e service nova] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1377.736711] env[69992]: DEBUG oslo_vmware.api [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897972, 'name': PowerOffVM_Task, 'duration_secs': 0.253673} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.737518] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1377.737518] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Volume detach. 
Driver type: vmdk {{(pid=69992) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1377.737518] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582083', 'volume_id': '5fab4ca8-04ab-4575-95a1-e55e5a73415d', 'name': 'volume-5fab4ca8-04ab-4575-95a1-e55e5a73415d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '1f9f3bdf-c806-4ac9-85f3-6b33b983fafe', 'attached_at': '2025-03-10T17:54:37.000000', 'detached_at': '', 'volume_id': '5fab4ca8-04ab-4575-95a1-e55e5a73415d', 'serial': '5fab4ca8-04ab-4575-95a1-e55e5a73415d'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1377.740107] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2155f8d-34fa-4501-8576-f6aeff74baed {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.773374] env[69992]: DEBUG oslo_vmware.rw_handles [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Completed reading data from the image iterator. {{(pid=69992) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1377.773473] env[69992]: DEBUG oslo_vmware.rw_handles [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52561dee-fd9d-99b5-31d7-0e4b4d184c34/disk-0.vmdk. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1377.774270] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07042b66-6805-4f50-a584-c513c50988d1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.777671] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba29b5a-e7e3-45fc-93d5-b03eea50a2e6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.780990] env[69992]: DEBUG oslo_concurrency.lockutils [req-94658b7b-653c-40ac-a1c8-b1be6263c4b1 req-01109b8f-20e8-402e-a301-fb345a23e432 service nova] Releasing lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1377.784834] env[69992]: DEBUG oslo_vmware.rw_handles [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52561dee-fd9d-99b5-31d7-0e4b4d184c34/disk-0.vmdk is in state: ready. 
{{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1377.784936] env[69992]: DEBUG oslo_vmware.rw_handles [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52561dee-fd9d-99b5-31d7-0e4b4d184c34/disk-0.vmdk. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1377.786638] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-a87b3191-e240-4fee-9d98-13fece9f0038 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.788870] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f42eb7f-50b0-4bed-a3f7-bb9b94203359 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.819347] env[69992]: DEBUG oslo_concurrency.lockutils [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Releasing lock "refresh_cache-b72eb094-b0fa-4e6f-bc29-c110692c7204" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1377.820020] env[69992]: DEBUG nova.compute.manager [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1377.820157] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1377.825674] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d8cfef5-12e3-4ad8-93aa-50317293d335 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.829320] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599d0534-5f10-4b62-aa26-8aeb4a25ee67 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.836417] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1377.847434] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a66dba09-ef6f-4e98-8ad3-7029b3345213 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.849203] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] 
The volume has not been displaced from its original location: [datastore1] volume-5fab4ca8-04ab-4575-95a1-e55e5a73415d/volume-5fab4ca8-04ab-4575-95a1-e55e5a73415d.vmdk. No consolidation needed. {{(pid=69992) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1377.854672] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Reconfiguring VM instance instance-00000055 to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1377.857806] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1493e47f-4499-4684-a0ed-85716d3c22f2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.878149] env[69992]: DEBUG oslo_vmware.api [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for the task: (returnval){ [ 1377.878149] env[69992]: value = "task-2897973" [ 1377.878149] env[69992]: _type = "Task" [ 1377.878149] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.878149] env[69992]: DEBUG oslo_vmware.api [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1377.878149] env[69992]: value = "task-2897974" [ 1377.878149] env[69992]: _type = "Task" [ 1377.878149] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.882944] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a7bc638-ea12-43ad-b6d6-43af76350c42 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Volume attach. 
Driver type: vmdk {{(pid=69992) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1377.883194] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a7bc638-ea12-43ad-b6d6-43af76350c42 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582109', 'volume_id': '38a23a44-927a-49f0-af50-0d71be5adb30', 'name': 'volume-38a23a44-927a-49f0-af50-0d71be5adb30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9464339a-b760-47e9-bc75-e88ce18bf71b', 'attached_at': '', 'detached_at': '', 'volume_id': '38a23a44-927a-49f0-af50-0d71be5adb30', 'serial': '38a23a44-927a-49f0-af50-0d71be5adb30'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1377.887544] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc1c6f7d-18a3-4a99-94dd-bc61516f4d38 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.898860] env[69992]: DEBUG oslo_vmware.api [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897973, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.913224] env[69992]: DEBUG oslo_vmware.api [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897974, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.914971] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b771585-16f6-4356-a5ba-cfe1398d5442 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.919146] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c24708-23f6-4a63-8e63-96005e1e2915 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.950523] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a7bc638-ea12-43ad-b6d6-43af76350c42 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] volume-38a23a44-927a-49f0-af50-0d71be5adb30/volume-38a23a44-927a-49f0-af50-0d71be5adb30.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1377.955560] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be2f260-d333-4f65-a001-61e2f7c07b57 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.959811] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dbd12928-19e3-411d-bad5-9443d1b14abe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.973108] env[69992]: DEBUG oslo_vmware.rw_handles [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52561dee-fd9d-99b5-31d7-0e4b4d184c34/disk-0.vmdk. 
{{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1377.973419] env[69992]: INFO nova.virt.vmwareapi.images [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Downloaded image file data 2afccf79-1e06-45e0-bd6d-e1bf4c00e288 [ 1377.974613] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09fd87eb-369e-41fa-a4bd-26120b928ff1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.022111] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1378.023813] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4290fd93-ecd5-4da1-bd38-f09f90195559 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.026383] env[69992]: DEBUG oslo_vmware.api [None req-5a7bc638-ea12-43ad-b6d6-43af76350c42 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1378.026383] env[69992]: value = "task-2897975" [ 1378.026383] env[69992]: _type = "Task" [ 1378.026383] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.027360] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a44741b3-4cb8-44e8-bf8f-0daa20633bab {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.043406] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51450042-cac4-485a-a71c-9b7d5d1ce611 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.048071] env[69992]: DEBUG oslo_vmware.api [None req-5a7bc638-ea12-43ad-b6d6-43af76350c42 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897975, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.059788] env[69992]: DEBUG nova.compute.provider_tree [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1378.065028] env[69992]: INFO nova.virt.vmwareapi.images [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] The imported VM was unregistered [ 1378.065287] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Caching image {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1378.065541] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Creating directory with path [datastore1] devstack-image-cache_base/2afccf79-1e06-45e0-bd6d-e1bf4c00e288 {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1378.066278] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d9f7090e-462b-424d-8a7f-1d24a239dbfa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.085644] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Created directory with path [datastore1] devstack-image-cache_base/2afccf79-1e06-45e0-bd6d-e1bf4c00e288 {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1378.085937] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_d44fe310-c02a-4f0a-a9f6-0f7af48d3991/OSTACK_IMG_d44fe310-c02a-4f0a-a9f6-0f7af48d3991.vmdk to [datastore1] devstack-image-cache_base/2afccf79-1e06-45e0-bd6d-e1bf4c00e288/2afccf79-1e06-45e0-bd6d-e1bf4c00e288.vmdk. {{(pid=69992) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1378.086791] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-cfdd1579-18ed-4c02-a562-01143782026f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.095245] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1378.095245] env[69992]: value = "task-2897977" [ 1378.095245] env[69992]: _type = "Task" [ 1378.095245] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.098773] env[69992]: DEBUG nova.network.neutron [-] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1378.108602] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897977, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.217745] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9c83b08f-e9e0-49c5-9aff-df27500c0448 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.227892] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46438d2f-5b39-47a4-a554-7ace54cd4b49 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.269838] env[69992]: DEBUG nova.compute.manager [req-f11bca94-0c02-409d-91a7-e3ddd35367be req-a1546efa-c74a-467b-8ef4-b4f6d160ff3e service nova] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Detach interface failed, port_id=bb5d8567-e3df-4e01-9ce4-3bd611ab4d40, reason: Instance bf45e20c-0fd7-4a27-924c-0ae56c6cff82 could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1378.353236] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquiring lock "e9018928-5237-4ba1-8c18-9ff1ec64a79c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1378.353817] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Lock "e9018928-5237-4ba1-8c18-9ff1ec64a79c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1378.389896] env[69992]: DEBUG oslo_vmware.api [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897973, 'name': PowerOffVM_Task, 'duration_secs': 0.168502} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.390815] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1378.391203] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1378.391839] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26e3f4b6-9fbe-4653-a09a-df3508e81e81 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.397551] env[69992]: DEBUG oslo_vmware.api [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897974, 'name': ReconfigVM_Task, 'duration_secs': 0.371522} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.398434] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Reconfigured VM instance instance-00000055 to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1378.403742] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7eaa7449-82f8-41dd-a3c3-7047609caee5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.421052] env[69992]: DEBUG oslo_vmware.api [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1378.421052] env[69992]: value = "task-2897979" [ 1378.421052] env[69992]: _type = "Task" [ 1378.421052] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.427589] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1378.427589] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1378.427589] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Deleting the datastore file [datastore1] b72eb094-b0fa-4e6f-bc29-c110692c7204 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1378.428210] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ed2eff9f-2443-4748-a236-1c7f26b406d5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.437985] env[69992]: DEBUG oslo_vmware.api [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897979, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.439907] env[69992]: DEBUG oslo_vmware.api [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for the task: (returnval){ [ 1378.439907] env[69992]: value = "task-2897980" [ 1378.439907] env[69992]: _type = "Task" [ 1378.439907] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.451307] env[69992]: DEBUG oslo_vmware.api [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897980, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.546655] env[69992]: DEBUG oslo_vmware.api [None req-5a7bc638-ea12-43ad-b6d6-43af76350c42 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897975, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.570921] env[69992]: DEBUG nova.scheduler.client.report [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1378.601237] env[69992]: INFO nova.compute.manager [-] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Took 1.41 seconds to deallocate network for instance. [ 1378.616528] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897977, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.856968] env[69992]: DEBUG nova.compute.manager [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1378.938799] env[69992]: DEBUG oslo_vmware.api [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897979, 'name': ReconfigVM_Task, 'duration_secs': 0.251699} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.939975] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582083', 'volume_id': '5fab4ca8-04ab-4575-95a1-e55e5a73415d', 'name': 'volume-5fab4ca8-04ab-4575-95a1-e55e5a73415d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '1f9f3bdf-c806-4ac9-85f3-6b33b983fafe', 'attached_at': '2025-03-10T17:54:37.000000', 'detached_at': '', 'volume_id': '5fab4ca8-04ab-4575-95a1-e55e5a73415d', 'serial': '5fab4ca8-04ab-4575-95a1-e55e5a73415d'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1378.940328] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1378.941627] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c77770-695f-4c49-80de-cc2c452aaa7b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.956438] env[69992]: DEBUG oslo_vmware.api [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897980, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.960898] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1378.961227] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7cd4de67-2456-476b-977c-c8a82e4f74d7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.044568] env[69992]: DEBUG oslo_vmware.api [None req-5a7bc638-ea12-43ad-b6d6-43af76350c42 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897975, 'name': ReconfigVM_Task, 'duration_secs': 0.671588} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.044991] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a7bc638-ea12-43ad-b6d6-43af76350c42 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Reconfigured VM instance instance-00000061 to attach disk [datastore1] volume-38a23a44-927a-49f0-af50-0d71be5adb30/volume-38a23a44-927a-49f0-af50-0d71be5adb30.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1379.049873] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9eed33b-1276-4d99-a567-2a50f1af8e3f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.067436] env[69992]: DEBUG oslo_vmware.api [None req-5a7bc638-ea12-43ad-b6d6-43af76350c42 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1379.067436] env[69992]: value = "task-2897982" [ 1379.067436] env[69992]: _type = "Task" [ 1379.067436] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.081721] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.538s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1379.082411] env[69992]: DEBUG nova.compute.manager [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1379.085804] env[69992]: DEBUG oslo_vmware.api [None req-5a7bc638-ea12-43ad-b6d6-43af76350c42 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897982, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.088898] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.065s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1379.088898] env[69992]: INFO nova.compute.claims [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1379.109362] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897977, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.118062] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1379.409498] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1379.450906] env[69992]: DEBUG oslo_vmware.api [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897980, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.577800] env[69992]: DEBUG oslo_vmware.api [None req-5a7bc638-ea12-43ad-b6d6-43af76350c42 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897982, 'name': ReconfigVM_Task, 'duration_secs': 0.198203} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.578149] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a7bc638-ea12-43ad-b6d6-43af76350c42 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582109', 'volume_id': '38a23a44-927a-49f0-af50-0d71be5adb30', 'name': 'volume-38a23a44-927a-49f0-af50-0d71be5adb30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9464339a-b760-47e9-bc75-e88ce18bf71b', 'attached_at': '', 'detached_at': '', 'volume_id': '38a23a44-927a-49f0-af50-0d71be5adb30', 'serial': '38a23a44-927a-49f0-af50-0d71be5adb30'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1379.588481] env[69992]: DEBUG nova.compute.utils [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1379.590035] env[69992]: DEBUG nova.compute.manager [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1379.590228] env[69992]: DEBUG nova.network.neutron [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1379.608107] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897977, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.642294] env[69992]: DEBUG nova.policy [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8589a47b616643f5a513f62354529eda', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57eaf44c4ac5491380b329e1e86e9454', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1379.953967] env[69992]: DEBUG oslo_vmware.api [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Task: {'id': task-2897980, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.131026} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.954334] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1379.954742] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1379.954742] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1379.954890] env[69992]: INFO nova.compute.manager [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Took 2.13 seconds to destroy the instance on the hypervisor. [ 1379.955036] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1379.955651] env[69992]: DEBUG nova.compute.manager [-] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1379.955651] env[69992]: DEBUG nova.network.neutron [-] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1379.989917] env[69992]: DEBUG nova.network.neutron [-] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1380.001840] env[69992]: DEBUG nova.network.neutron [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Successfully created port: 8d35e214-0207-4c75-9f49-da956de6db36 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1380.096758] env[69992]: DEBUG nova.compute.manager [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1380.128104] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897977, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.153075] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1380.153075] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1380.153075] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Deleting the datastore file [datastore2] 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1380.153075] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-42e249e7-5494-4045-8625-19ab09613869 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.166188] env[69992]: DEBUG oslo_vmware.api [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1380.166188] env[69992]: value = "task-2897983" [ 1380.166188] env[69992]: _type = "Task" [ 1380.166188] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.180383] env[69992]: DEBUG oslo_vmware.api [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897983, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.394459] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49779ce-ee91-4e67-b0b5-b67fe175188a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.408080] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af05561-be73-48c6-8508-0dff11ed3fcc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.460205] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c76efc0-cee8-4ac3-9e85-20d9b3a06037 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.470194] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd039812-cb2b-4a0f-b190-9c3fcfd787ff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.492762] env[69992]: DEBUG nova.network.neutron [-] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1380.497997] env[69992]: DEBUG nova.compute.provider_tree [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1380.617059] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897977, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.427184} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.620158] env[69992]: INFO nova.virt.vmwareapi.ds_util [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_d44fe310-c02a-4f0a-a9f6-0f7af48d3991/OSTACK_IMG_d44fe310-c02a-4f0a-a9f6-0f7af48d3991.vmdk to [datastore1] devstack-image-cache_base/2afccf79-1e06-45e0-bd6d-e1bf4c00e288/2afccf79-1e06-45e0-bd6d-e1bf4c00e288.vmdk. 
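The MoveVirtualDisk_Task, CopyVirtualDisk_Task and DeleteDatastoreFile_Task records above all follow the same oslo.vmware pattern: an asynchronous vSphere task is started through the API session and then polled by wait_for_task(), which is what emits the recurring "Waiting for the task ... to complete" and "_poll_task ... progress is N%" DEBUG lines. A minimal sketch of that pattern, not the Nova code itself; the vCenter endpoint, credentials and datastore path below are hypothetical placeholders:

    # Sketch only: endpoint, credentials and datastore path are placeholders.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc.example.org', 'user', 'secret',          # hypothetical vCenter host/credentials
        api_retry_count=10, task_poll_interval=0.5)  # the poll interval drives the progress lines

    # Start an asynchronous vSphere task via the FileManager managed object.
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task',
        session.vim.service_content.fileManager,
        name='[datastore1] OSTACK_IMG_example',      # hypothetical file to delete
        datacenter=None)                             # a real call would pass a Datacenter moref

    # Block until the task reaches 'success' (or raise on 'error'); this call
    # produces the periodic "progress is N%" records seen in the log.
    task_info = session.wait_for_task(task)
    print(task_info.state, getattr(task_info, 'result', None))

The disk moves and copies in the surrounding records are driven the same way, only against the virtualDiskManager managed object instead of the fileManager.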
[ 1380.620158] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Cleaning up location [datastore1] OSTACK_IMG_d44fe310-c02a-4f0a-a9f6-0f7af48d3991 {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1380.620158] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_d44fe310-c02a-4f0a-a9f6-0f7af48d3991 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1380.620158] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef34e79b-4b63-4187-a34b-bbb8ec9d541b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.626229] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1380.626229] env[69992]: value = "task-2897984" [ 1380.626229] env[69992]: _type = "Task" [ 1380.626229] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.635069] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897984, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.653472] env[69992]: DEBUG nova.objects.instance [None req-5a7bc638-ea12-43ad-b6d6-43af76350c42 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lazy-loading 'flavor' on Instance uuid 9464339a-b760-47e9-bc75-e88ce18bf71b {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1380.680025] env[69992]: DEBUG oslo_vmware.api [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2897983, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.293543} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.680025] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1380.680025] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1380.680025] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1380.680025] env[69992]: INFO nova.compute.manager [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Took 3.47 seconds to destroy the instance on the hypervisor. [ 1380.680025] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1380.680025] env[69992]: DEBUG nova.compute.manager [-] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1380.680025] env[69992]: DEBUG nova.network.neutron [-] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1380.997973] env[69992]: INFO nova.compute.manager [-] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Took 1.04 seconds to deallocate network for instance. 
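The recurring 'Acquiring lock "compute_resources" ... acquired ... waited N.NNNs ... "released" ... held N.NNNs' records in this section come from oslo.concurrency's named-lock wrapper around the resource tracker methods. A minimal sketch of that locking pattern; the guarded function body here is hypothetical, not Nova's actual claim logic:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(resource_tracker, context, instance):
        # Runs with the in-process "compute_resources" lock held; the wrapper
        # installed by the decorator logs the acquire / waited / held timings
        # that appear as the lockutils DEBUG records in this log.
        return None

    # The same lock is also available as a context manager:
    with lockutils.lock('compute_resources'):
        pass  # critical section

Because the lock is per name and per process, claims, usage updates and deallocations against the same compute node serialize on it; the multi-second "waited" values above reflect other request contexts holding that lock under concurrent tempest load.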
[ 1381.001276] env[69992]: DEBUG nova.scheduler.client.report [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1381.123217] env[69992]: DEBUG nova.compute.manager [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1381.142274] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897984, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.03852} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.142812] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1381.143267] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2afccf79-1e06-45e0-bd6d-e1bf4c00e288/2afccf79-1e06-45e0-bd6d-e1bf4c00e288.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1381.145083] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2afccf79-1e06-45e0-bd6d-e1bf4c00e288/2afccf79-1e06-45e0-bd6d-e1bf4c00e288.vmdk to [datastore1] 08869f38-9609-4f7f-9110-2f26fd1cb3f7/08869f38-9609-4f7f-9110-2f26fd1cb3f7.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1381.145288] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8415b5c-8fdf-4e92-a0ac-99b8c14f84ac {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.154133] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1381.154133] env[69992]: value = "task-2897985" [ 1381.154133] env[69992]: _type = "Task" [ 1381.154133] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.162617] env[69992]: DEBUG nova.virt.hardware [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1381.162998] env[69992]: DEBUG nova.virt.hardware [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1381.163252] env[69992]: DEBUG nova.virt.hardware [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1381.163538] env[69992]: DEBUG nova.virt.hardware [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1381.163676] env[69992]: DEBUG nova.virt.hardware [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1381.163842] env[69992]: DEBUG nova.virt.hardware [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1381.164074] env[69992]: DEBUG nova.virt.hardware [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1381.164263] env[69992]: DEBUG nova.virt.hardware [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1381.164439] env[69992]: DEBUG nova.virt.hardware [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 
tempest-ServersTestJSON-1985445483-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1381.164697] env[69992]: DEBUG nova.virt.hardware [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1381.164917] env[69992]: DEBUG nova.virt.hardware [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1381.168349] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0ab3900-fe87-423e-8430-cb66b07b4af9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.173887] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5a7bc638-ea12-43ad-b6d6-43af76350c42 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "9464339a-b760-47e9-bc75-e88ce18bf71b" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.413s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1381.180955] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897985, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.185409] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d461dc26-9c68-48f1-a09e-8eedf5662ff7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.222103] env[69992]: DEBUG nova.compute.manager [req-e5d94b8f-7991-4990-813c-fc942716159c req-8dc2463d-602d-4a12-86fb-8df5e40f9f35 service nova] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Received event network-vif-deleted-2c7ae122-41e5-4605-a33e-4516dd1f5945 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1381.223392] env[69992]: INFO nova.compute.manager [req-e5d94b8f-7991-4990-813c-fc942716159c req-8dc2463d-602d-4a12-86fb-8df5e40f9f35 service nova] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Neutron deleted interface 2c7ae122-41e5-4605-a33e-4516dd1f5945; detaching it from the instance and deleting it from the info cache [ 1381.224223] env[69992]: DEBUG nova.network.neutron [req-e5d94b8f-7991-4990-813c-fc942716159c req-8dc2463d-602d-4a12-86fb-8df5e40f9f35 service nova] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1381.507748] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.421s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1381.508428] env[69992]: DEBUG nova.compute.manager [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1381.512292] env[69992]: DEBUG oslo_concurrency.lockutils [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1381.513542] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.396s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1381.513846] env[69992]: DEBUG nova.objects.instance [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lazy-loading 'resources' on Instance uuid bf45e20c-0fd7-4a27-924c-0ae56c6cff82 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1381.632433] env[69992]: DEBUG nova.network.neutron [-] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1381.666022] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897985, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.716047] env[69992]: INFO nova.compute.manager [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Rebuilding instance [ 1381.721231] env[69992]: DEBUG nova.network.neutron [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Successfully updated port: 8d35e214-0207-4c75-9f49-da956de6db36 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1381.728021] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3b1228af-c712-4069-b481-276b8939faa0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.740860] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e373db32-0838-47e6-aceb-dd89b319ecf6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.781057] env[69992]: DEBUG nova.compute.manager [req-e5d94b8f-7991-4990-813c-fc942716159c req-8dc2463d-602d-4a12-86fb-8df5e40f9f35 service nova] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Detach interface failed, port_id=2c7ae122-41e5-4605-a33e-4516dd1f5945, reason: Instance 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe could not be found. 
{{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1381.792681] env[69992]: DEBUG nova.compute.manager [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1381.793625] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-334d6be8-8034-4d62-a998-2262364c29d4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.018081] env[69992]: DEBUG nova.compute.utils [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1382.018081] env[69992]: DEBUG nova.compute.manager [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1382.018081] env[69992]: DEBUG nova.network.neutron [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1382.073250] env[69992]: DEBUG nova.policy [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ede57670ddc4434a9ba4745870ddfa14', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53308426a9c44f46b78a155e612ee5a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1382.141173] env[69992]: INFO nova.compute.manager [-] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Took 1.46 seconds to deallocate network for instance. [ 1382.166976] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897985, 'name': CopyVirtualDisk_Task} progress is 43%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.226633] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "refresh_cache-921f1e1a-6de3-404d-8970-8545db0128f2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.226913] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired lock "refresh_cache-921f1e1a-6de3-404d-8970-8545db0128f2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1382.227198] env[69992]: DEBUG nova.network.neutron [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1382.301296] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e3147d-3d2a-4d40-9e0c-d8161ce97d55 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.314142] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16878cb5-25ab-4380-9469-e01182c38356 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.346645] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ac38a98-27ee-4167-8068-d36f8c914dd3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.355938] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-226cc3af-f3f5-4fa5-861a-8518b6a278fa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.375476] env[69992]: DEBUG nova.compute.provider_tree [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1382.526205] env[69992]: DEBUG nova.compute.manager [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1382.666515] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897985, 'name': CopyVirtualDisk_Task} progress is 66%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.667473] env[69992]: DEBUG nova.network.neutron [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Successfully created port: 3c8deaad-b3d5-4d3e-b86c-1fbc4e767b64 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1382.722967] env[69992]: INFO nova.compute.manager [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Took 0.58 seconds to detach 1 volumes for instance. [ 1382.789135] env[69992]: DEBUG nova.network.neutron [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1382.808487] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1382.808770] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed3c6236-cd11-469e-87bd-d3be760b7695 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.819293] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1382.819293] env[69992]: value = "task-2897986" [ 1382.819293] env[69992]: _type = "Task" [ 1382.819293] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.828192] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897986, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.880328] env[69992]: DEBUG nova.scheduler.client.report [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1383.047362] env[69992]: DEBUG nova.network.neutron [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Updating instance_info_cache with network_info: [{"id": "8d35e214-0207-4c75-9f49-da956de6db36", "address": "fa:16:3e:37:b2:f5", "network": {"id": "8276eb4a-aa4d-463a-beae-6aab3fe1a5ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-494735806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57eaf44c4ac5491380b329e1e86e9454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d35e214-02", "ovs_interfaceid": "8d35e214-0207-4c75-9f49-da956de6db36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1383.168848] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897985, 'name': CopyVirtualDisk_Task} progress is 88%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.231984] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1383.263254] env[69992]: DEBUG nova.compute.manager [req-0eaecd84-decf-48ce-9a97-6405618e2e32 req-25bf5979-abad-4621-aa6a-cf732109b905 service nova] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Received event network-vif-plugged-8d35e214-0207-4c75-9f49-da956de6db36 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1383.263494] env[69992]: DEBUG oslo_concurrency.lockutils [req-0eaecd84-decf-48ce-9a97-6405618e2e32 req-25bf5979-abad-4621-aa6a-cf732109b905 service nova] Acquiring lock "921f1e1a-6de3-404d-8970-8545db0128f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1383.263737] env[69992]: DEBUG oslo_concurrency.lockutils [req-0eaecd84-decf-48ce-9a97-6405618e2e32 req-25bf5979-abad-4621-aa6a-cf732109b905 service nova] Lock "921f1e1a-6de3-404d-8970-8545db0128f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1383.263912] env[69992]: DEBUG oslo_concurrency.lockutils [req-0eaecd84-decf-48ce-9a97-6405618e2e32 req-25bf5979-abad-4621-aa6a-cf732109b905 service nova] Lock "921f1e1a-6de3-404d-8970-8545db0128f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1383.264100] env[69992]: DEBUG nova.compute.manager [req-0eaecd84-decf-48ce-9a97-6405618e2e32 req-25bf5979-abad-4621-aa6a-cf732109b905 service nova] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] No waiting events found dispatching network-vif-plugged-8d35e214-0207-4c75-9f49-da956de6db36 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1383.264273] env[69992]: WARNING nova.compute.manager [req-0eaecd84-decf-48ce-9a97-6405618e2e32 req-25bf5979-abad-4621-aa6a-cf732109b905 service nova] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Received unexpected event network-vif-plugged-8d35e214-0207-4c75-9f49-da956de6db36 for instance with vm_state building and task_state spawning. [ 1383.268030] env[69992]: DEBUG nova.compute.manager [req-0eaecd84-decf-48ce-9a97-6405618e2e32 req-25bf5979-abad-4621-aa6a-cf732109b905 service nova] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Received event network-changed-8d35e214-0207-4c75-9f49-da956de6db36 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1383.268030] env[69992]: DEBUG nova.compute.manager [req-0eaecd84-decf-48ce-9a97-6405618e2e32 req-25bf5979-abad-4621-aa6a-cf732109b905 service nova] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Refreshing instance network info cache due to event network-changed-8d35e214-0207-4c75-9f49-da956de6db36. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1383.268030] env[69992]: DEBUG oslo_concurrency.lockutils [req-0eaecd84-decf-48ce-9a97-6405618e2e32 req-25bf5979-abad-4621-aa6a-cf732109b905 service nova] Acquiring lock "refresh_cache-921f1e1a-6de3-404d-8970-8545db0128f2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.331339] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897986, 'name': PowerOffVM_Task, 'duration_secs': 0.431863} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.332095] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1383.386025] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.871s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1383.387352] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.978s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1383.389153] env[69992]: INFO nova.compute.claims [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1383.403372] env[69992]: INFO nova.compute.manager [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Detaching volume 38a23a44-927a-49f0-af50-0d71be5adb30 [ 1383.408420] env[69992]: INFO nova.scheduler.client.report [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Deleted allocations for instance bf45e20c-0fd7-4a27-924c-0ae56c6cff82 [ 1383.452939] env[69992]: INFO nova.virt.block_device [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Attempting to driver detach volume 38a23a44-927a-49f0-af50-0d71be5adb30 from mountpoint /dev/sdb [ 1383.453312] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Volume detach. 
Driver type: vmdk {{(pid=69992) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1383.453547] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582109', 'volume_id': '38a23a44-927a-49f0-af50-0d71be5adb30', 'name': 'volume-38a23a44-927a-49f0-af50-0d71be5adb30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9464339a-b760-47e9-bc75-e88ce18bf71b', 'attached_at': '', 'detached_at': '', 'volume_id': '38a23a44-927a-49f0-af50-0d71be5adb30', 'serial': '38a23a44-927a-49f0-af50-0d71be5adb30'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1383.454721] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea2f2af5-db70-452b-824c-7a701991da77 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.480832] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee3372f-5d49-461e-b974-7348ef675104 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.490737] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a3be65-5f71-4604-b238-7886a229a929 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.516141] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac93e40c-6d60-4788-99e8-08f5fba05ac0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.532783] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] The volume has not been displaced from its original location: [datastore1] volume-38a23a44-927a-49f0-af50-0d71be5adb30/volume-38a23a44-927a-49f0-af50-0d71be5adb30.vmdk. No consolidation needed. {{(pid=69992) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1383.538226] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Reconfiguring VM instance instance-00000061 to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1383.539676] env[69992]: DEBUG nova.compute.manager [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1383.543221] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8694ef24-7970-40cf-a9c0-32210059beb3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.557756] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Releasing lock "refresh_cache-921f1e1a-6de3-404d-8970-8545db0128f2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1383.558243] env[69992]: DEBUG nova.compute.manager [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Instance network_info: |[{"id": "8d35e214-0207-4c75-9f49-da956de6db36", "address": "fa:16:3e:37:b2:f5", "network": {"id": "8276eb4a-aa4d-463a-beae-6aab3fe1a5ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-494735806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57eaf44c4ac5491380b329e1e86e9454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d35e214-02", "ovs_interfaceid": "8d35e214-0207-4c75-9f49-da956de6db36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1383.558376] env[69992]: DEBUG oslo_concurrency.lockutils [req-0eaecd84-decf-48ce-9a97-6405618e2e32 req-25bf5979-abad-4621-aa6a-cf732109b905 service nova] Acquired lock "refresh_cache-921f1e1a-6de3-404d-8970-8545db0128f2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1383.558455] env[69992]: DEBUG nova.network.neutron [req-0eaecd84-decf-48ce-9a97-6405618e2e32 req-25bf5979-abad-4621-aa6a-cf732109b905 service nova] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Refreshing network info cache for port 8d35e214-0207-4c75-9f49-da956de6db36 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1383.560095] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:b2:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73b1ea51-8078-4169-921e-d5a224120ab4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8d35e214-0207-4c75-9f49-da956de6db36', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1383.569830] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1383.571293] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1383.571527] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-950e7ebb-d90b-4fac-bfd1-053711698885 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.590529] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1383.590529] env[69992]: value = "task-2897987" [ 1383.590529] env[69992]: _type = "Task" [ 1383.590529] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.595928] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1383.595928] env[69992]: value = "task-2897988" [ 1383.595928] env[69992]: _type = "Task" [ 1383.595928] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.598471] env[69992]: DEBUG nova.virt.hardware [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1383.598763] env[69992]: DEBUG nova.virt.hardware [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1383.599381] env[69992]: DEBUG nova.virt.hardware [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1383.599381] env[69992]: DEBUG nova.virt.hardware [None 
req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1383.599381] env[69992]: DEBUG nova.virt.hardware [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1383.599537] env[69992]: DEBUG nova.virt.hardware [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1383.599704] env[69992]: DEBUG nova.virt.hardware [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1383.599875] env[69992]: DEBUG nova.virt.hardware [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1383.600115] env[69992]: DEBUG nova.virt.hardware [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1383.600303] env[69992]: DEBUG nova.virt.hardware [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1383.600495] env[69992]: DEBUG nova.virt.hardware [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1383.601717] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c7ad38-e640-4b62-889e-2c8827f88c07 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.610983] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897987, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.622606] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47cc5db8-4098-4126-aac9-7a77ccf3ca38 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.626490] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897988, 'name': CreateVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.667612] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897985, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.26883} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.667906] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2afccf79-1e06-45e0-bd6d-e1bf4c00e288/2afccf79-1e06-45e0-bd6d-e1bf4c00e288.vmdk to [datastore1] 08869f38-9609-4f7f-9110-2f26fd1cb3f7/08869f38-9609-4f7f-9110-2f26fd1cb3f7.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1383.672041] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b0243a-42fb-450a-8e39-e5a3c38936c2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.692898] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 08869f38-9609-4f7f-9110-2f26fd1cb3f7/08869f38-9609-4f7f-9110-2f26fd1cb3f7.vmdk or device None with type streamOptimized {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1383.693228] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-665a2375-cc3e-4b6a-a8a5-94648a5d1f26 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.712780] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1383.712780] env[69992]: value = "task-2897989" [ 1383.712780] env[69992]: _type = "Task" [ 1383.712780] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.720912] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897989, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.879588] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Acquiring lock "fc769b20-222e-4ff0-8ffd-7b24e4658b14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1383.879899] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Lock "fc769b20-222e-4ff0-8ffd-7b24e4658b14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1383.919646] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d0c8bca5-fbf6-4b83-b6d6-f6d703ca3756 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "bf45e20c-0fd7-4a27-924c-0ae56c6cff82" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.907s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1384.101686] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897987, 'name': ReconfigVM_Task, 'duration_secs': 0.284578} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.107756] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Reconfigured VM instance instance-00000061 to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1384.112572] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7327e87-8e81-4420-a868-dcd7a1792b06 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.128845] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897988, 'name': CreateVM_Task, 'duration_secs': 0.490764} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.130091] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1384.130473] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1384.130473] env[69992]: value = "task-2897990" [ 1384.130473] env[69992]: _type = "Task" [ 1384.130473] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.131157] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1384.131328] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1384.131643] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1384.131944] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b45701d-a714-4a45-a618-f885884de389 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.143570] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897990, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.144491] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1384.144491] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]522db11f-f07c-92e8-080e-323f44bbcbf7" [ 1384.144491] env[69992]: _type = "Task" [ 1384.144491] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.152132] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]522db11f-f07c-92e8-080e-323f44bbcbf7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.224661] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897989, 'name': ReconfigVM_Task, 'duration_secs': 0.310867} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.224981] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 08869f38-9609-4f7f-9110-2f26fd1cb3f7/08869f38-9609-4f7f-9110-2f26fd1cb3f7.vmdk or device None with type streamOptimized {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1384.225872] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-46c36f58-42d9-444b-ae76-ed9bae896596 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.232244] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1384.232244] env[69992]: value = "task-2897991" [ 1384.232244] env[69992]: _type = "Task" [ 1384.232244] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.240893] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897991, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.385124] env[69992]: DEBUG nova.compute.manager [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1384.472697] env[69992]: DEBUG nova.network.neutron [req-0eaecd84-decf-48ce-9a97-6405618e2e32 req-25bf5979-abad-4621-aa6a-cf732109b905 service nova] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Updated VIF entry in instance network info cache for port 8d35e214-0207-4c75-9f49-da956de6db36. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1384.473154] env[69992]: DEBUG nova.network.neutron [req-0eaecd84-decf-48ce-9a97-6405618e2e32 req-25bf5979-abad-4621-aa6a-cf732109b905 service nova] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Updating instance_info_cache with network_info: [{"id": "8d35e214-0207-4c75-9f49-da956de6db36", "address": "fa:16:3e:37:b2:f5", "network": {"id": "8276eb4a-aa4d-463a-beae-6aab3fe1a5ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-494735806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57eaf44c4ac5491380b329e1e86e9454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d35e214-02", "ovs_interfaceid": "8d35e214-0207-4c75-9f49-da956de6db36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1384.521127] env[69992]: DEBUG nova.network.neutron [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Successfully updated port: 3c8deaad-b3d5-4d3e-b86c-1fbc4e767b64 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1384.642867] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897990, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.660432] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]522db11f-f07c-92e8-080e-323f44bbcbf7, 'name': SearchDatastore_Task, 'duration_secs': 0.024344} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.660638] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1384.660880] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1384.661139] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1384.661294] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1384.661474] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1384.662088] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d17e2fb3-4916-438b-825f-78ed711c78a0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.671710] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1384.671710] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1384.671710] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f514daa4-d7c8-4bad-af27-d607dc197700 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.680711] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1384.680711] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52696541-8cde-cba0-df0c-30ce9b8bab02" [ 1384.680711] env[69992]: _type = "Task" [ 1384.680711] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.687801] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52696541-8cde-cba0-df0c-30ce9b8bab02, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.689417] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36833287-cf3c-4cb4-95c6-bfe8adcea97c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.696090] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb370d1f-096c-4a8d-8342-a250829e2b54 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.726714] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab929a1-10ba-4913-89a0-da105a1ac75a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.737311] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2bb866-26cc-40d7-b34a-c2b57dce76b9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.745982] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897991, 'name': Rename_Task, 'duration_secs': 0.15287} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.753998] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1384.754558] env[69992]: DEBUG nova.compute.provider_tree [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1384.755947] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3c147d61-4520-40b4-8287-c3ca94ec23c7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.763040] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1384.763040] env[69992]: value = "task-2897992" [ 1384.763040] env[69992]: _type = "Task" [ 1384.763040] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.771118] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897992, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.908800] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1384.924126] env[69992]: DEBUG oslo_vmware.rw_handles [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d2f005-3f05-4471-0e51-bab4be1d7cbd/disk-0.vmdk. 
{{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1384.925126] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a070ad4f-c767-4062-ba3a-dfa2b869e15d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.932585] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "d50d7460-2b70-45bc-940f-7d45f329fa1c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1384.932810] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "d50d7460-2b70-45bc-940f-7d45f329fa1c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1384.935614] env[69992]: DEBUG oslo_vmware.rw_handles [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d2f005-3f05-4471-0e51-bab4be1d7cbd/disk-0.vmdk is in state: ready. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1384.935785] env[69992]: ERROR oslo_vmware.rw_handles [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d2f005-3f05-4471-0e51-bab4be1d7cbd/disk-0.vmdk due to incomplete transfer. [ 1384.936180] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-02335a4a-9692-404b-8886-a5da728f1f41 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.943139] env[69992]: DEBUG oslo_vmware.rw_handles [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d2f005-3f05-4471-0e51-bab4be1d7cbd/disk-0.vmdk. 
{{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1384.943340] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Uploaded image 697d1ada-cc80-456a-9a40-098dcf5fc096 to the Glance image server {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1384.945061] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Destroying the VM {{(pid=69992) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1384.945799] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a2d57ec4-9f3e-4eaf-8e0d-d0b0c6ec457d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.951229] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1384.951229] env[69992]: value = "task-2897993" [ 1384.951229] env[69992]: _type = "Task" [ 1384.951229] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.959743] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897993, 'name': Destroy_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.976179] env[69992]: DEBUG oslo_concurrency.lockutils [req-0eaecd84-decf-48ce-9a97-6405618e2e32 req-25bf5979-abad-4621-aa6a-cf732109b905 service nova] Releasing lock "refresh_cache-921f1e1a-6de3-404d-8970-8545db0128f2" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1385.024745] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "refresh_cache-d5a6a189-0a7d-49ba-acab-35a244cf76eb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.024938] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquired lock "refresh_cache-d5a6a189-0a7d-49ba-acab-35a244cf76eb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1385.025178] env[69992]: DEBUG nova.network.neutron [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1385.114792] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "interface-fe3624b0-7d4a-4a16-83e3-3f28c2a74006-3d571c52-27cf-411e-86f3-279b842e93ca" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1385.115208] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "interface-fe3624b0-7d4a-4a16-83e3-3f28c2a74006-3d571c52-27cf-411e-86f3-279b842e93ca" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1385.115490] env[69992]: DEBUG nova.objects.instance [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lazy-loading 'flavor' on Instance uuid fe3624b0-7d4a-4a16-83e3-3f28c2a74006 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1385.143062] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2897990, 'name': ReconfigVM_Task, 'duration_secs': 0.985851} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.143397] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582109', 'volume_id': '38a23a44-927a-49f0-af50-0d71be5adb30', 'name': 'volume-38a23a44-927a-49f0-af50-0d71be5adb30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9464339a-b760-47e9-bc75-e88ce18bf71b', 'attached_at': '', 'detached_at': '', 'volume_id': '38a23a44-927a-49f0-af50-0d71be5adb30', 'serial': '38a23a44-927a-49f0-af50-0d71be5adb30'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1385.190582] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52696541-8cde-cba0-df0c-30ce9b8bab02, 'name': SearchDatastore_Task, 'duration_secs': 0.009682} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.191405] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a6dfed0-7976-4aa3-a958-957abc5bbf04 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.198075] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1385.198075] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]523a637d-ba50-c8f9-b1ea-0bd663db0ad5" [ 1385.198075] env[69992]: _type = "Task" [ 1385.198075] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.206774] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523a637d-ba50-c8f9-b1ea-0bd663db0ad5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.259621] env[69992]: DEBUG nova.scheduler.client.report [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1385.274616] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897992, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.292275] env[69992]: DEBUG nova.compute.manager [req-d285297b-8ef9-493d-a5f7-bb0a3d2b2a3c req-c49b2b7f-3d74-4a53-b204-e89e8077701b service nova] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Received event network-vif-plugged-3c8deaad-b3d5-4d3e-b86c-1fbc4e767b64 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1385.292406] env[69992]: DEBUG oslo_concurrency.lockutils [req-d285297b-8ef9-493d-a5f7-bb0a3d2b2a3c req-c49b2b7f-3d74-4a53-b204-e89e8077701b service nova] Acquiring lock "d5a6a189-0a7d-49ba-acab-35a244cf76eb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1385.292617] env[69992]: DEBUG oslo_concurrency.lockutils [req-d285297b-8ef9-493d-a5f7-bb0a3d2b2a3c req-c49b2b7f-3d74-4a53-b204-e89e8077701b service nova] Lock "d5a6a189-0a7d-49ba-acab-35a244cf76eb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1385.292768] env[69992]: DEBUG oslo_concurrency.lockutils [req-d285297b-8ef9-493d-a5f7-bb0a3d2b2a3c req-c49b2b7f-3d74-4a53-b204-e89e8077701b service nova] Lock "d5a6a189-0a7d-49ba-acab-35a244cf76eb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1385.292997] env[69992]: DEBUG nova.compute.manager [req-d285297b-8ef9-493d-a5f7-bb0a3d2b2a3c req-c49b2b7f-3d74-4a53-b204-e89e8077701b service nova] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] No waiting events found dispatching network-vif-plugged-3c8deaad-b3d5-4d3e-b86c-1fbc4e767b64 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1385.293243] env[69992]: WARNING nova.compute.manager [req-d285297b-8ef9-493d-a5f7-bb0a3d2b2a3c req-c49b2b7f-3d74-4a53-b204-e89e8077701b service nova] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Received unexpected event network-vif-plugged-3c8deaad-b3d5-4d3e-b86c-1fbc4e767b64 for instance with vm_state building and task_state spawning. 
[ 1385.293413] env[69992]: DEBUG nova.compute.manager [req-d285297b-8ef9-493d-a5f7-bb0a3d2b2a3c req-c49b2b7f-3d74-4a53-b204-e89e8077701b service nova] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Received event network-changed-3c8deaad-b3d5-4d3e-b86c-1fbc4e767b64 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1385.293593] env[69992]: DEBUG nova.compute.manager [req-d285297b-8ef9-493d-a5f7-bb0a3d2b2a3c req-c49b2b7f-3d74-4a53-b204-e89e8077701b service nova] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Refreshing instance network info cache due to event network-changed-3c8deaad-b3d5-4d3e-b86c-1fbc4e767b64. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1385.293767] env[69992]: DEBUG oslo_concurrency.lockutils [req-d285297b-8ef9-493d-a5f7-bb0a3d2b2a3c req-c49b2b7f-3d74-4a53-b204-e89e8077701b service nova] Acquiring lock "refresh_cache-d5a6a189-0a7d-49ba-acab-35a244cf76eb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.436725] env[69992]: DEBUG nova.compute.manager [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1385.461042] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897993, 'name': Destroy_Task, 'duration_secs': 0.351743} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.461321] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Destroyed the VM [ 1385.461595] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Deleting Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1385.461848] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2005bafd-fd55-47d6-9c0c-1b38213e1d8d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.468351] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1385.468351] env[69992]: value = "task-2897994" [ 1385.468351] env[69992]: _type = "Task" [ 1385.468351] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.476145] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897994, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.557703] env[69992]: DEBUG nova.network.neutron [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1385.710755] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523a637d-ba50-c8f9-b1ea-0bd663db0ad5, 'name': SearchDatastore_Task, 'duration_secs': 0.009956} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.713883] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1385.714168] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 921f1e1a-6de3-404d-8970-8545db0128f2/921f1e1a-6de3-404d-8970-8545db0128f2.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1385.714680] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-92e5b4cf-4005-4703-b8a5-720a61ca08fe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.721486] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1385.721486] env[69992]: value = "task-2897995" [ 1385.721486] env[69992]: _type = "Task" [ 1385.721486] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.730616] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897995, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.730616] env[69992]: DEBUG nova.network.neutron [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Updating instance_info_cache with network_info: [{"id": "3c8deaad-b3d5-4d3e-b86c-1fbc4e767b64", "address": "fa:16:3e:54:bd:db", "network": {"id": "838abbcd-8525-47f9-b3e0-eb738a0cea7e", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1428842137-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53308426a9c44f46b78a155e612ee5a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c8deaad-b3", "ovs_interfaceid": "3c8deaad-b3d5-4d3e-b86c-1fbc4e767b64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.751993] env[69992]: DEBUG nova.objects.instance [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lazy-loading 'pci_requests' on Instance uuid fe3624b0-7d4a-4a16-83e3-3f28c2a74006 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1385.765141] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.378s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1385.765750] env[69992]: DEBUG nova.compute.manager [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1385.773312] env[69992]: DEBUG oslo_concurrency.lockutils [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.260s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1385.773312] env[69992]: DEBUG nova.objects.instance [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Lazy-loading 'resources' on Instance uuid b72eb094-b0fa-4e6f-bc29-c110692c7204 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1385.780569] env[69992]: DEBUG oslo_vmware.api [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2897992, 'name': PowerOnVM_Task, 'duration_secs': 0.538277} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.781486] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1385.885279] env[69992]: DEBUG nova.compute.manager [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1385.886274] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca032c83-d5a6-4abf-a176-26ac362abcac {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.963490] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1385.978413] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2897994, 'name': RemoveSnapshot_Task, 'duration_secs': 0.351527} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.978680] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Deleted Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1385.978956] env[69992]: DEBUG nova.compute.manager [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1385.979905] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5055de-58b1-444a-adbf-39206d4c1a56 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.199926] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1386.199926] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ea6027c-ba01-4673-8fe7-2fb31a352966 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.207827] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1386.207827] env[69992]: value = "task-2897996" [ 1386.207827] env[69992]: _type = "Task" [ 1386.207827] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.220177] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] VM already powered off {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1386.220177] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Volume detach. 
Driver type: vmdk {{(pid=69992) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1386.220177] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582109', 'volume_id': '38a23a44-927a-49f0-af50-0d71be5adb30', 'name': 'volume-38a23a44-927a-49f0-af50-0d71be5adb30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9464339a-b760-47e9-bc75-e88ce18bf71b', 'attached_at': '', 'detached_at': '', 'volume_id': '38a23a44-927a-49f0-af50-0d71be5adb30', 'serial': '38a23a44-927a-49f0-af50-0d71be5adb30'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1386.220412] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a47756-c5a7-48da-8318-4eefb917d3fd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.254994] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Releasing lock "refresh_cache-d5a6a189-0a7d-49ba-acab-35a244cf76eb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1386.255735] env[69992]: DEBUG nova.compute.manager [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Instance network_info: |[{"id": "3c8deaad-b3d5-4d3e-b86c-1fbc4e767b64", "address": "fa:16:3e:54:bd:db", "network": {"id": "838abbcd-8525-47f9-b3e0-eb738a0cea7e", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1428842137-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53308426a9c44f46b78a155e612ee5a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c8deaad-b3", "ovs_interfaceid": "3c8deaad-b3d5-4d3e-b86c-1fbc4e767b64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1386.255885] env[69992]: DEBUG nova.objects.base [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 
1386.256160] env[69992]: DEBUG nova.network.neutron [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1386.261628] env[69992]: DEBUG oslo_concurrency.lockutils [req-d285297b-8ef9-493d-a5f7-bb0a3d2b2a3c req-c49b2b7f-3d74-4a53-b204-e89e8077701b service nova] Acquired lock "refresh_cache-d5a6a189-0a7d-49ba-acab-35a244cf76eb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1386.261815] env[69992]: DEBUG nova.network.neutron [req-d285297b-8ef9-493d-a5f7-bb0a3d2b2a3c req-c49b2b7f-3d74-4a53-b204-e89e8077701b service nova] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Refreshing network info cache for port 3c8deaad-b3d5-4d3e-b86c-1fbc4e767b64 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1386.263055] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:bd:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3c8deaad-b3d5-4d3e-b86c-1fbc4e767b64', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1386.270399] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1386.271157] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb1b0cd-a9a8-4e5b-95f9-57af48026a18 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.274111] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897995, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.430611} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.275121] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1386.277894] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 921f1e1a-6de3-404d-8970-8545db0128f2/921f1e1a-6de3-404d-8970-8545db0128f2.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1386.278117] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1386.279190] env[69992]: DEBUG nova.compute.utils [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1386.280524] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0bd66330-f1e7-4be9-973d-54b2fabd32fb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.298240] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e9ab6456-7635-4ae6-ab0e-7fc807bc3b17 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.300387] env[69992]: DEBUG nova.compute.manager [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1386.300387] env[69992]: DEBUG nova.network.neutron [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1386.304170] env[69992]: DEBUG nova.compute.manager [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1386.309450] env[69992]: WARNING nova.virt.vmwareapi.driver [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1386.309794] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1386.313041] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f7275c-2e1b-4759-933b-2beb8592e643 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.318348] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1386.318348] env[69992]: value = "task-2897997" [ 1386.318348] env[69992]: _type = "Task" [ 1386.318348] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.318348] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1386.318348] env[69992]: value = "task-2897998" [ 1386.318348] env[69992]: _type = "Task" [ 1386.318348] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.328652] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1386.329626] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-14227400-ad7f-450f-bb19-58ca5e132989 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.337078] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897998, 'name': CreateVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.337594] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897997, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.375481] env[69992]: DEBUG nova.policy [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd0f7a6e9a76342a1a4fd39a8b21a31d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dc6fa4e45f4c47c49d67e6efe2eb7a50', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1386.382101] env[69992]: DEBUG nova.policy [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4961e3352fc94012a5ad457736da538c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d5c32fe8b254c5abdd4123bd2088353', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1386.404677] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0b14ac98-0ace-4f1f-9512-7518f090b183 tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 25.549s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1386.415223] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1386.415639] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1386.415861] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Deleting the datastore file [datastore2] 9464339a-b760-47e9-bc75-e88ce18bf71b {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1386.416483] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cd46f999-a12b-4418-b9a6-155202dadd24 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.424847] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 
tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1386.424847] env[69992]: value = "task-2898000" [ 1386.424847] env[69992]: _type = "Task" [ 1386.424847] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.437590] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898000, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.494243] env[69992]: INFO nova.compute.manager [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Shelve offloading [ 1386.554807] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab32dcf-1cfc-4204-91a6-18edf499cae1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.564438] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d26e93d5-9068-4863-9782-8664c9d75ed4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.600588] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2085b1e4-e6bf-4cf6-a1f8-dc5a3dd193a6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.608834] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5419bf-7314-4906-904e-f0eab7269538 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.628133] env[69992]: DEBUG nova.compute.provider_tree [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1386.826193] env[69992]: DEBUG nova.network.neutron [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Successfully created port: 52bbb2d4-ddca-4b0b-951c-b68eb107fd53 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1386.834847] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2897998, 'name': CreateVM_Task, 'duration_secs': 0.312257} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.835113] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2897997, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094043} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.835275] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1386.835511] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1386.836204] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1386.836364] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1386.836668] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1386.838027] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2492542-13f1-4008-a89a-9f53e55cb54d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.839707] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbfcee5c-e441-4701-972a-edbaa13405a5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.864116] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 921f1e1a-6de3-404d-8970-8545db0128f2/921f1e1a-6de3-404d-8970-8545db0128f2.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1386.864496] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1386.864496] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52b8b033-f220-2496-3612-6c32692a1c9c" [ 1386.864496] env[69992]: _type = "Task" [ 1386.864496] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.864841] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df93ae82-ccbc-4315-ad04-e756d87807fe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.890308] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b8b033-f220-2496-3612-6c32692a1c9c, 'name': SearchDatastore_Task, 'duration_secs': 0.009331} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.891527] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1386.891769] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1386.891999] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1386.892163] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1386.892338] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1386.892639] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1386.892639] env[69992]: value = "task-2898001" [ 1386.892639] env[69992]: _type = "Task" [ 1386.892639] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.892820] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95b339e2-f960-4a7d-94a9-cafe278bde90 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.902270] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898001, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.903232] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1386.903407] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1386.904270] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6d87b77-5ae3-418a-bd07-660aa536466b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.909214] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1386.909214] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52f94a0c-2e96-93f0-a312-c9e7d172eab5" [ 1386.909214] env[69992]: _type = "Task" [ 1386.909214] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.918858] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52f94a0c-2e96-93f0-a312-c9e7d172eab5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.934981] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898000, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186789} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.935245] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1386.935429] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1386.935604] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1387.000873] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1387.001197] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a57a8acf-3d39-41e9-8720-6e7c33a7d7ff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.006074] env[69992]: DEBUG nova.network.neutron [req-d285297b-8ef9-493d-a5f7-bb0a3d2b2a3c req-c49b2b7f-3d74-4a53-b204-e89e8077701b service nova] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Updated VIF entry in instance network info cache for port 3c8deaad-b3d5-4d3e-b86c-1fbc4e767b64. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1387.006444] env[69992]: DEBUG nova.network.neutron [req-d285297b-8ef9-493d-a5f7-bb0a3d2b2a3c req-c49b2b7f-3d74-4a53-b204-e89e8077701b service nova] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Updating instance_info_cache with network_info: [{"id": "3c8deaad-b3d5-4d3e-b86c-1fbc4e767b64", "address": "fa:16:3e:54:bd:db", "network": {"id": "838abbcd-8525-47f9-b3e0-eb738a0cea7e", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1428842137-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53308426a9c44f46b78a155e612ee5a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c8deaad-b3", "ovs_interfaceid": "3c8deaad-b3d5-4d3e-b86c-1fbc4e767b64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1387.008872] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1387.008872] env[69992]: value = "task-2898002" [ 1387.008872] env[69992]: _type = "Task" [ 1387.008872] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.020102] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898002, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.079744] env[69992]: DEBUG nova.network.neutron [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Successfully created port: 38bb614b-f887-4e3a-996d-e5b3b1141511 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1387.128483] env[69992]: DEBUG nova.scheduler.client.report [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1387.303402] env[69992]: DEBUG nova.network.neutron [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Successfully created port: 92f378ec-95f1-4743-9f83-12cb0a249cd0 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1387.317724] env[69992]: DEBUG nova.compute.manager [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1387.346832] env[69992]: DEBUG nova.virt.hardware [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1387.347099] env[69992]: DEBUG nova.virt.hardware [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1387.347271] env[69992]: DEBUG nova.virt.hardware [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1387.347459] env[69992]: DEBUG nova.virt.hardware [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1387.347608] env[69992]: DEBUG nova.virt.hardware [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1387.347755] env[69992]: DEBUG nova.virt.hardware [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1387.347964] env[69992]: DEBUG nova.virt.hardware [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1387.348138] env[69992]: DEBUG nova.virt.hardware [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1387.348307] env[69992]: DEBUG nova.virt.hardware [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad 
tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1387.348500] env[69992]: DEBUG nova.virt.hardware [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1387.348630] env[69992]: DEBUG nova.virt.hardware [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1387.349510] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f2da456-0a4a-4471-b4d6-74f52888d77b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.360650] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203220b3-797f-49f1-9cec-96276f9a0322 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.403289] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898001, 'name': ReconfigVM_Task, 'duration_secs': 0.310136} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.403921] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Reconfigured VM instance instance-0000006a to attach disk [datastore1] 921f1e1a-6de3-404d-8970-8545db0128f2/921f1e1a-6de3-404d-8970-8545db0128f2.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1387.405010] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-79c44ca8-b80d-4799-a2c0-08c60ee33f33 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.413720] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1387.413720] env[69992]: value = "task-2898003" [ 1387.413720] env[69992]: _type = "Task" [ 1387.413720] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.419968] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52f94a0c-2e96-93f0-a312-c9e7d172eab5, 'name': SearchDatastore_Task, 'duration_secs': 0.010162} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.421028] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6852d709-624f-49b4-8b44-6e368b57e530 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.426136] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898003, 'name': Rename_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.428973] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1387.428973] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52c4af6a-3123-c123-3629-d5895ec1b810" [ 1387.428973] env[69992]: _type = "Task" [ 1387.428973] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.436695] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c4af6a-3123-c123-3629-d5895ec1b810, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.440299] env[69992]: INFO nova.virt.block_device [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Booting with volume 38a23a44-927a-49f0-af50-0d71be5adb30 at /dev/sdb [ 1387.473245] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-53d3464b-c728-4976-a1d0-8785758da08c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.482683] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3111d706-cb05-482a-8e51-7e40e754f753 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.513802] env[69992]: DEBUG oslo_concurrency.lockutils [req-d285297b-8ef9-493d-a5f7-bb0a3d2b2a3c req-c49b2b7f-3d74-4a53-b204-e89e8077701b service nova] Releasing lock "refresh_cache-d5a6a189-0a7d-49ba-acab-35a244cf76eb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1387.517551] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1582d6ec-0948-4419-9b6a-d6848b5fceaa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.525936] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] VM already powered off {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 
1387.526160] env[69992]: DEBUG nova.compute.manager [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1387.526929] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb0b6bb-ff76-40dd-a2d1-bae9854d71c4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.532545] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f9ba33-57c4-4d53-8605-a35de68e736b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.545853] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "refresh_cache-25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.545853] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquired lock "refresh_cache-25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1387.546106] env[69992]: DEBUG nova.network.neutron [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1387.567720] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1a2dbe-f1ec-4d3d-be59-573133cf3993 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.575162] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908711fa-e2e5-4da0-9dcb-ccca53fc1d59 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.590099] env[69992]: DEBUG nova.virt.block_device [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Updating existing volume attachment record: ad0425e6-68e9-4ff8-acb7-8124ab71aee6 {{(pid=69992) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1387.633975] env[69992]: DEBUG oslo_concurrency.lockutils [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.861s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1387.636413] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 
tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.405s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1387.636664] env[69992]: DEBUG nova.objects.instance [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lazy-loading 'resources' on Instance uuid 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1387.662868] env[69992]: INFO nova.scheduler.client.report [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Deleted allocations for instance b72eb094-b0fa-4e6f-bc29-c110692c7204 [ 1387.924485] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898003, 'name': Rename_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.926958] env[69992]: DEBUG nova.compute.manager [req-cb5e859f-a656-4709-883a-9219ecdcadd7 req-46bd599b-d7b4-479f-b621-b33ec8d029d2 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Received event network-vif-plugged-3d571c52-27cf-411e-86f3-279b842e93ca {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1387.927235] env[69992]: DEBUG oslo_concurrency.lockutils [req-cb5e859f-a656-4709-883a-9219ecdcadd7 req-46bd599b-d7b4-479f-b621-b33ec8d029d2 service nova] Acquiring lock "fe3624b0-7d4a-4a16-83e3-3f28c2a74006-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1387.927402] env[69992]: DEBUG oslo_concurrency.lockutils [req-cb5e859f-a656-4709-883a-9219ecdcadd7 req-46bd599b-d7b4-479f-b621-b33ec8d029d2 service nova] Lock "fe3624b0-7d4a-4a16-83e3-3f28c2a74006-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1387.927586] env[69992]: DEBUG oslo_concurrency.lockutils [req-cb5e859f-a656-4709-883a-9219ecdcadd7 req-46bd599b-d7b4-479f-b621-b33ec8d029d2 service nova] Lock "fe3624b0-7d4a-4a16-83e3-3f28c2a74006-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1387.927765] env[69992]: DEBUG nova.compute.manager [req-cb5e859f-a656-4709-883a-9219ecdcadd7 req-46bd599b-d7b4-479f-b621-b33ec8d029d2 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] No waiting events found dispatching network-vif-plugged-3d571c52-27cf-411e-86f3-279b842e93ca {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1387.927943] env[69992]: WARNING nova.compute.manager [req-cb5e859f-a656-4709-883a-9219ecdcadd7 req-46bd599b-d7b4-479f-b621-b33ec8d029d2 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Received unexpected event network-vif-plugged-3d571c52-27cf-411e-86f3-279b842e93ca for instance with vm_state active and task_state None. 
[ 1387.938881] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c4af6a-3123-c123-3629-d5895ec1b810, 'name': SearchDatastore_Task, 'duration_secs': 0.029276} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.939161] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1387.939418] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] d5a6a189-0a7d-49ba-acab-35a244cf76eb/d5a6a189-0a7d-49ba-acab-35a244cf76eb.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1387.939680] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-10b52d31-29a0-4c25-b7a2-a77b5b6a2ee2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.946256] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1387.946256] env[69992]: value = "task-2898004" [ 1387.946256] env[69992]: _type = "Task" [ 1387.946256] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.954595] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898004, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.011781] env[69992]: DEBUG nova.network.neutron [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Successfully updated port: 3d571c52-27cf-411e-86f3-279b842e93ca {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1388.171068] env[69992]: DEBUG oslo_concurrency.lockutils [None req-817782e5-d6e6-4482-acd0-6e25309c3cd6 tempest-ServerShowV257Test-1648629764 tempest-ServerShowV257Test-1648629764-project-member] Lock "b72eb094-b0fa-4e6f-bc29-c110692c7204" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.086s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1388.245396] env[69992]: DEBUG nova.network.neutron [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Updating instance_info_cache with network_info: [{"id": "0042c1e4-d906-4261-a18e-ce232533cbdd", "address": "fa:16:3e:44:45:52", "network": {"id": "58824cf0-bce0-4f1b-9942-dd68624dd3ff", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1287894269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1471cdd6671b4e6ebc23b8fc2b120b63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6fab536-1e48-4d07-992a-076f0e6d089c", "external-id": "nsx-vlan-transportzone-61", "segmentation_id": 61, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0042c1e4-d9", "ovs_interfaceid": "0042c1e4-d906-4261-a18e-ce232533cbdd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1388.335245] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e7da58-1567-4953-9e03-b022ae622f64 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.343150] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb82714-6265-4b7c-9330-a28169ae7934 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.374827] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e5eab09-3076-44a0-93fc-3cffbc1a603b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.382840] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f602c9-976a-492c-8a2c-73ab4e1f143f {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.397210] env[69992]: DEBUG nova.compute.provider_tree [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1388.424929] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898003, 'name': Rename_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.455126] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898004, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.515873] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1388.515873] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1388.516036] env[69992]: DEBUG nova.network.neutron [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1388.751065] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Releasing lock "refresh_cache-25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1388.767241] env[69992]: DEBUG nova.network.neutron [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Successfully updated port: 52bbb2d4-ddca-4b0b-951c-b68eb107fd53 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1388.901576] env[69992]: DEBUG nova.scheduler.client.report [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1388.930557] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898003, 'name': Rename_Task, 'duration_secs': 1.287531} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.930848] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1388.931155] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7bd3a224-5182-4919-bef0-3526d2d3b4c2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.940801] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1388.940801] env[69992]: value = "task-2898005" [ 1388.940801] env[69992]: _type = "Task" [ 1388.940801] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.954013] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898005, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.959413] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898004, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.048320] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1389.049498] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515d272c-b45c-48aa-b889-10150e3a5b87 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.063130] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1389.064151] env[69992]: WARNING nova.network.neutron [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] 7c8e9b14-bcc2-45f2-8b37-5f478b75057e already exists in list: networks containing: ['7c8e9b14-bcc2-45f2-8b37-5f478b75057e']. ignoring it [ 1389.065975] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d5e6266e-9c73-4e1a-82fd-9909a06c0ff5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.139036] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1389.139036] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1389.139036] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Deleting the datastore file [datastore2] 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1389.139036] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0f8b8b27-4224-4741-9ff3-63a44e696112 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.145101] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1389.145101] env[69992]: value = "task-2898007" [ 1389.145101] env[69992]: _type = "Task" [ 1389.145101] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.154388] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898007, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.407938] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.771s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1389.410233] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.502s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1389.412985] env[69992]: INFO nova.compute.claims [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1389.454305] env[69992]: DEBUG nova.network.neutron [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Updating instance_info_cache with network_info: [{"id": "c35bf17a-173c-4013-b8e4-85b2415e8860", "address": "fa:16:3e:e2:6a:23", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc35bf17a-17", "ovs_interfaceid": "c35bf17a-173c-4013-b8e4-85b2415e8860", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3d571c52-27cf-411e-86f3-279b842e93ca", "address": "fa:16:3e:5b:0e:7b", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d571c52-27", "ovs_interfaceid": "3d571c52-27cf-411e-86f3-279b842e93ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1389.456682] env[69992]: INFO nova.scheduler.client.report [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Deleted allocations for instance 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe [ 1389.462343] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898005, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.468863] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898004, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.147986} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.471099] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] d5a6a189-0a7d-49ba-acab-35a244cf76eb/d5a6a189-0a7d-49ba-acab-35a244cf76eb.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1389.471099] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1389.471286] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-67bb64a3-1f99-4a9e-882e-31b4906ded1c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.478777] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1389.478777] env[69992]: value = "task-2898008" [ 1389.478777] env[69992]: _type = "Task" [ 1389.478777] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.488839] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898008, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.655734] env[69992]: DEBUG oslo_vmware.api [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898007, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193049} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.655969] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1389.656167] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1389.656342] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1389.672927] env[69992]: INFO nova.scheduler.client.report [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Deleted allocations for instance 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7 [ 1389.709097] env[69992]: DEBUG nova.virt.hardware [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1389.709352] env[69992]: DEBUG nova.virt.hardware [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1389.709512] env[69992]: DEBUG nova.virt.hardware [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1389.709696] env[69992]: DEBUG nova.virt.hardware [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1389.709844] env[69992]: DEBUG nova.virt.hardware [None req-1f91f382-afda-4307-b614-e3ddec010318 
tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1389.709990] env[69992]: DEBUG nova.virt.hardware [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1389.710212] env[69992]: DEBUG nova.virt.hardware [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1389.710372] env[69992]: DEBUG nova.virt.hardware [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1389.710538] env[69992]: DEBUG nova.virt.hardware [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1389.710700] env[69992]: DEBUG nova.virt.hardware [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1389.710871] env[69992]: DEBUG nova.virt.hardware [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1389.711910] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-938b5bdf-2cbc-4a84-b46a-55f87c41340e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.719472] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee84b67d-c907-4d3f-9d4b-f6f349467e69 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.733201] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:3c:2e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46785c9c-8b22-487d-a854-b3e67c5ed1d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1f44518f-713e-4671-bc22-96c67ac28c8e', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 
1389.740042] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1389.740316] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1389.740562] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d45331da-3ceb-4a45-9211-b18f0730074c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.758797] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1389.758797] env[69992]: value = "task-2898009" [ 1389.758797] env[69992]: _type = "Task" [ 1389.758797] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.765531] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898009, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.952566] env[69992]: DEBUG oslo_vmware.api [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898005, 'name': PowerOnVM_Task, 'duration_secs': 0.534754} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.953279] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1389.953279] env[69992]: INFO nova.compute.manager [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Took 8.83 seconds to spawn the instance on the hypervisor. 
[ 1389.953279] env[69992]: DEBUG nova.compute.manager [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1389.954077] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114f65e0-b347-4d99-8831-89881ff4b265 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.958832] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1389.959541] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.959626] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1389.960319] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-745d3450-9684-41fb-a26f-43a1942af9d3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.981880] env[69992]: DEBUG nova.virt.hardware [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1389.982125] env[69992]: DEBUG nova.virt.hardware [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1389.982315] env[69992]: DEBUG nova.virt.hardware [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1389.982502] env[69992]: DEBUG nova.virt.hardware [None 
req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1389.982650] env[69992]: DEBUG nova.virt.hardware [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1389.982798] env[69992]: DEBUG nova.virt.hardware [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1389.983018] env[69992]: DEBUG nova.virt.hardware [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1389.983199] env[69992]: DEBUG nova.virt.hardware [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1389.983379] env[69992]: DEBUG nova.virt.hardware [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1389.983544] env[69992]: DEBUG nova.virt.hardware [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1389.983717] env[69992]: DEBUG nova.virt.hardware [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1389.990272] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Reconfiguring VM to attach interface {{(pid=69992) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1389.990910] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35f49241-5175-4fc7-b8c6-2d6dff599276 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "1f9f3bdf-c806-4ac9-85f3-6b33b983fafe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.296s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1389.996197] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aff39103-bd1b-481d-8084-cc55590c3d3d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.010094] env[69992]: DEBUG nova.compute.manager [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Received event network-changed-3d571c52-27cf-411e-86f3-279b842e93ca {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1390.010310] env[69992]: DEBUG nova.compute.manager [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Refreshing instance network info cache due to event network-changed-3d571c52-27cf-411e-86f3-279b842e93ca. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1390.010545] env[69992]: DEBUG oslo_concurrency.lockutils [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] Acquiring lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1390.010696] env[69992]: DEBUG oslo_concurrency.lockutils [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] Acquired lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1390.010861] env[69992]: DEBUG nova.network.neutron [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Refreshing network info cache for port 3d571c52-27cf-411e-86f3-279b842e93ca {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1390.019745] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898008, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065997} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.021201] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1390.021779] env[69992]: DEBUG oslo_vmware.api [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1390.021779] env[69992]: value = "task-2898010" [ 1390.021779] env[69992]: _type = "Task" [ 1390.021779] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.022478] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5156a487-64bb-41e5-993e-8f750b03da63 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.051024] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] d5a6a189-0a7d-49ba-acab-35a244cf76eb/d5a6a189-0a7d-49ba-acab-35a244cf76eb.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1390.051379] env[69992]: DEBUG oslo_vmware.api [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898010, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.051912] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e9cad60-a9b5-447c-8205-344a90eb598f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.071601] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1390.071601] env[69992]: value = "task-2898011" [ 1390.071601] env[69992]: _type = "Task" [ 1390.071601] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.079718] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898011, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.177669] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1390.273368] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898009, 'name': CreateVM_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.501593] env[69992]: INFO nova.compute.manager [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Took 14.40 seconds to build instance. 
[ 1390.534355] env[69992]: DEBUG oslo_vmware.api [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898010, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.585340] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898011, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.614030] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d478f5c-273c-41ee-b264-c46b13df4796 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.621407] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1230c30d-9565-4d22-85bc-02588dc21a99 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.655466] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-007657f0-00d6-4c20-9bbf-1c5eef929603 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.662759] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-513de464-eca7-4a89-8c32-4f350ea1a512 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.676177] env[69992]: DEBUG nova.compute.provider_tree [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1390.769704] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898009, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.822389] env[69992]: DEBUG nova.network.neutron [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Updated VIF entry in instance network info cache for port 3d571c52-27cf-411e-86f3-279b842e93ca. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1390.822904] env[69992]: DEBUG nova.network.neutron [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Updating instance_info_cache with network_info: [{"id": "c35bf17a-173c-4013-b8e4-85b2415e8860", "address": "fa:16:3e:e2:6a:23", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc35bf17a-17", "ovs_interfaceid": "c35bf17a-173c-4013-b8e4-85b2415e8860", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3d571c52-27cf-411e-86f3-279b842e93ca", "address": "fa:16:3e:5b:0e:7b", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d571c52-27", "ovs_interfaceid": "3d571c52-27cf-411e-86f3-279b842e93ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1390.934791] env[69992]: DEBUG nova.network.neutron [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Successfully updated port: 38bb614b-f887-4e3a-996d-e5b3b1141511 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1391.004101] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f0feef43-4350-40f9-877a-f411e2da6935 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "921f1e1a-6de3-404d-8970-8545db0128f2" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.919s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1391.034383] env[69992]: DEBUG oslo_vmware.api [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898010, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.083049] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898011, 'name': ReconfigVM_Task, 'duration_secs': 0.836317} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.083164] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Reconfigured VM instance instance-0000006b to attach disk [datastore1] d5a6a189-0a7d-49ba-acab-35a244cf76eb/d5a6a189-0a7d-49ba-acab-35a244cf76eb.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1391.083810] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fabfbb56-e08a-4ca2-8c06-16952c607f26 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.090528] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1391.090528] env[69992]: value = "task-2898012" [ 1391.090528] env[69992]: _type = "Task" [ 1391.090528] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.098133] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898012, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.179465] env[69992]: DEBUG nova.scheduler.client.report [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1391.270946] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898009, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.326033] env[69992]: DEBUG oslo_concurrency.lockutils [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] Releasing lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1391.326297] env[69992]: DEBUG nova.compute.manager [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Received event network-vif-plugged-52bbb2d4-ddca-4b0b-951c-b68eb107fd53 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1391.326505] env[69992]: DEBUG oslo_concurrency.lockutils [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] Acquiring lock "e9018928-5237-4ba1-8c18-9ff1ec64a79c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1391.326744] env[69992]: DEBUG oslo_concurrency.lockutils [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] Lock "e9018928-5237-4ba1-8c18-9ff1ec64a79c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1391.326916] env[69992]: DEBUG oslo_concurrency.lockutils [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] Lock "e9018928-5237-4ba1-8c18-9ff1ec64a79c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1391.327117] env[69992]: DEBUG nova.compute.manager [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] No waiting events found dispatching network-vif-plugged-52bbb2d4-ddca-4b0b-951c-b68eb107fd53 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1391.327298] env[69992]: WARNING nova.compute.manager [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Received unexpected event network-vif-plugged-52bbb2d4-ddca-4b0b-951c-b68eb107fd53 for instance with vm_state building and task_state spawning. [ 1391.327487] env[69992]: DEBUG nova.compute.manager [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Received event network-changed-52bbb2d4-ddca-4b0b-951c-b68eb107fd53 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1391.327651] env[69992]: DEBUG nova.compute.manager [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Refreshing instance network info cache due to event network-changed-52bbb2d4-ddca-4b0b-951c-b68eb107fd53. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1391.327861] env[69992]: DEBUG oslo_concurrency.lockutils [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] Acquiring lock "refresh_cache-e9018928-5237-4ba1-8c18-9ff1ec64a79c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1391.328024] env[69992]: DEBUG oslo_concurrency.lockutils [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] Acquired lock "refresh_cache-e9018928-5237-4ba1-8c18-9ff1ec64a79c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1391.328204] env[69992]: DEBUG nova.network.neutron [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Refreshing network info cache for port 52bbb2d4-ddca-4b0b-951c-b68eb107fd53 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1391.535527] env[69992]: DEBUG oslo_vmware.api [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898010, 'name': ReconfigVM_Task, 'duration_secs': 1.019721} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.536117] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1391.536346] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Reconfigured VM to attach interface {{(pid=69992) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1391.599589] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898012, 'name': Rename_Task, 'duration_secs': 0.182039} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.599863] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1391.600135] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-13e0ee2a-3439-428e-9492-ecbbc05d7ea2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.605840] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1391.605840] env[69992]: value = "task-2898013" [ 1391.605840] env[69992]: _type = "Task" [ 1391.605840] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.613558] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898013, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.684417] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.274s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1391.685047] env[69992]: DEBUG nova.compute.manager [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1391.688030] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.724s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1391.688869] env[69992]: INFO nova.compute.claims [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1391.770533] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898009, 'name': CreateVM_Task, 'duration_secs': 1.842504} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.770756] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1391.771450] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1391.771617] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1391.771937] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1391.772234] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd4543a2-b0cf-4f48-bf2d-1eb5e1bb8b18 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.776770] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1391.776770] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]521e4028-b57d-f26d-86dc-fdb988cccbe2" [ 1391.776770] env[69992]: _type = "Task" [ 1391.776770] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.784437] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521e4028-b57d-f26d-86dc-fdb988cccbe2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.868158] env[69992]: DEBUG nova.network.neutron [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1391.946892] env[69992]: DEBUG nova.network.neutron [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1392.042021] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ec643225-54aa-41b7-8d41-e98128431a88 tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "interface-fe3624b0-7d4a-4a16-83e3-3f28c2a74006-3d571c52-27cf-411e-86f3-279b842e93ca" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.926s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1392.117084] env[69992]: DEBUG oslo_vmware.api [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898013, 'name': PowerOnVM_Task, 'duration_secs': 0.497033} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.117941] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1392.118201] env[69992]: INFO nova.compute.manager [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Took 8.58 seconds to spawn the instance on the hypervisor. [ 1392.118428] env[69992]: DEBUG nova.compute.manager [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1392.119299] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b694f710-9aec-4fae-83fc-771f233fe57f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.193560] env[69992]: DEBUG nova.compute.utils [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1392.198291] env[69992]: DEBUG nova.compute.manager [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1392.198291] env[69992]: DEBUG nova.network.neutron [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1392.250179] env[69992]: DEBUG nova.policy [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c8154a2fd744b9a8c239beef4d590c5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e87041ec260a488c9162018da7f5a2ce', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1392.260510] env[69992]: DEBUG nova.compute.manager [req-de2c6e56-73ad-4f86-8866-fc1c59a14959 req-8b64cbce-269e-42da-934b-1930d3078e45 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Received event network-vif-plugged-38bb614b-f887-4e3a-996d-e5b3b1141511 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1392.260753] env[69992]: DEBUG oslo_concurrency.lockutils [req-de2c6e56-73ad-4f86-8866-fc1c59a14959 req-8b64cbce-269e-42da-934b-1930d3078e45 service nova] Acquiring lock "e9018928-5237-4ba1-8c18-9ff1ec64a79c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1392.260962] env[69992]: DEBUG oslo_concurrency.lockutils [req-de2c6e56-73ad-4f86-8866-fc1c59a14959 req-8b64cbce-269e-42da-934b-1930d3078e45 service nova] Lock "e9018928-5237-4ba1-8c18-9ff1ec64a79c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1392.261164] env[69992]: DEBUG oslo_concurrency.lockutils [req-de2c6e56-73ad-4f86-8866-fc1c59a14959 req-8b64cbce-269e-42da-934b-1930d3078e45 service nova] Lock "e9018928-5237-4ba1-8c18-9ff1ec64a79c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1392.261333] env[69992]: DEBUG nova.compute.manager [req-de2c6e56-73ad-4f86-8866-fc1c59a14959 req-8b64cbce-269e-42da-934b-1930d3078e45 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] No waiting events found dispatching network-vif-plugged-38bb614b-f887-4e3a-996d-e5b3b1141511 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1392.261527] env[69992]: WARNING nova.compute.manager [req-de2c6e56-73ad-4f86-8866-fc1c59a14959 req-8b64cbce-269e-42da-934b-1930d3078e45 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Received unexpected event network-vif-plugged-38bb614b-f887-4e3a-996d-e5b3b1141511 for instance with vm_state building and task_state spawning. 
[ 1392.261702] env[69992]: DEBUG nova.compute.manager [req-de2c6e56-73ad-4f86-8866-fc1c59a14959 req-8b64cbce-269e-42da-934b-1930d3078e45 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Received event network-changed-38bb614b-f887-4e3a-996d-e5b3b1141511 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1392.261900] env[69992]: DEBUG nova.compute.manager [req-de2c6e56-73ad-4f86-8866-fc1c59a14959 req-8b64cbce-269e-42da-934b-1930d3078e45 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Refreshing instance network info cache due to event network-changed-38bb614b-f887-4e3a-996d-e5b3b1141511. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1392.262131] env[69992]: DEBUG oslo_concurrency.lockutils [req-de2c6e56-73ad-4f86-8866-fc1c59a14959 req-8b64cbce-269e-42da-934b-1930d3078e45 service nova] Acquiring lock "refresh_cache-e9018928-5237-4ba1-8c18-9ff1ec64a79c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.290791] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521e4028-b57d-f26d-86dc-fdb988cccbe2, 'name': SearchDatastore_Task, 'duration_secs': 0.008909} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.290791] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1392.291034] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1392.291756] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.291756] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1392.291756] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1392.291983] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f37048ec-5ba7-4a2c-8cee-e81944185914 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.301814] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1392.301814] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1392.302563] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9be3591c-697a-4265-a46a-dcd126e2d565 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.308630] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1392.308630] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]521bf5b0-2762-23c2-3d2e-867fb295edd1" [ 1392.308630] env[69992]: _type = "Task" [ 1392.308630] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.317574] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521bf5b0-2762-23c2-3d2e-867fb295edd1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.450287] env[69992]: DEBUG oslo_concurrency.lockutils [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] Releasing lock "refresh_cache-e9018928-5237-4ba1-8c18-9ff1ec64a79c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1392.450630] env[69992]: DEBUG nova.compute.manager [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Received event network-vif-unplugged-0042c1e4-d906-4261-a18e-ce232533cbdd {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1392.450803] env[69992]: DEBUG oslo_concurrency.lockutils [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] Acquiring lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1392.451022] env[69992]: DEBUG oslo_concurrency.lockutils [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] Lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1392.451220] env[69992]: DEBUG oslo_concurrency.lockutils [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] Lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1392.452065] env[69992]: DEBUG nova.compute.manager [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] No waiting events found dispatching network-vif-unplugged-0042c1e4-d906-4261-a18e-ce232533cbdd {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1392.452065] env[69992]: WARNING nova.compute.manager [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Received unexpected event network-vif-unplugged-0042c1e4-d906-4261-a18e-ce232533cbdd for instance with vm_state shelved_offloaded and task_state None. [ 1392.452065] env[69992]: DEBUG nova.compute.manager [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Received event network-changed-0042c1e4-d906-4261-a18e-ce232533cbdd {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1392.452065] env[69992]: DEBUG nova.compute.manager [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Refreshing instance network info cache due to event network-changed-0042c1e4-d906-4261-a18e-ce232533cbdd. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1392.452065] env[69992]: DEBUG oslo_concurrency.lockutils [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] Acquiring lock "refresh_cache-25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.452338] env[69992]: DEBUG oslo_concurrency.lockutils [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] Acquired lock "refresh_cache-25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1392.452391] env[69992]: DEBUG nova.network.neutron [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Refreshing network info cache for port 0042c1e4-d906-4261-a18e-ce232533cbdd {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1392.454104] env[69992]: DEBUG oslo_concurrency.lockutils [req-de2c6e56-73ad-4f86-8866-fc1c59a14959 req-8b64cbce-269e-42da-934b-1930d3078e45 service nova] Acquired lock "refresh_cache-e9018928-5237-4ba1-8c18-9ff1ec64a79c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1392.454104] env[69992]: DEBUG nova.network.neutron [req-de2c6e56-73ad-4f86-8866-fc1c59a14959 req-8b64cbce-269e-42da-934b-1930d3078e45 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Refreshing network info cache for port 38bb614b-f887-4e3a-996d-e5b3b1141511 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1392.528841] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1392.529070] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1392.586533] env[69992]: DEBUG nova.network.neutron [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Successfully created port: e1406cdb-0572-4e8a-9429-723f364d855d {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1392.606971] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "921f1e1a-6de3-404d-8970-8545db0128f2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1392.607246] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "921f1e1a-6de3-404d-8970-8545db0128f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1392.607458] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "921f1e1a-6de3-404d-8970-8545db0128f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1392.607815] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "921f1e1a-6de3-404d-8970-8545db0128f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1392.607815] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "921f1e1a-6de3-404d-8970-8545db0128f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1392.611177] env[69992]: INFO nova.compute.manager [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Terminating instance [ 1392.641730] env[69992]: INFO nova.compute.manager [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Took 14.70 seconds to build instance. [ 1392.698810] env[69992]: DEBUG nova.compute.manager [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1392.820338] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521bf5b0-2762-23c2-3d2e-867fb295edd1, 'name': SearchDatastore_Task, 'duration_secs': 0.012489} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.824303] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf3375fd-fb73-482a-9f9c-739ad338c45c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.831278] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1392.831278] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]520b4c1e-a9bd-3288-9a62-836a59a421fe" [ 1392.831278] env[69992]: _type = "Task" [ 1392.831278] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.838634] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]520b4c1e-a9bd-3288-9a62-836a59a421fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.970388] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58097e5e-2c18-439b-9164-ac9a8b11f75a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.979863] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a494fb58-706d-4e99-bc02-ee10c57d041a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.016378] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99382b81-4058-48ea-95a6-02de6bf511b0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.024199] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a13895a0-0c41-4b24-b7b4-df82a5a19ac6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.031540] env[69992]: DEBUG nova.network.neutron [req-de2c6e56-73ad-4f86-8866-fc1c59a14959 req-8b64cbce-269e-42da-934b-1930d3078e45 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1393.031540] env[69992]: DEBUG nova.compute.manager [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1393.045177] env[69992]: DEBUG nova.compute.provider_tree [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1393.117666] env[69992]: DEBUG nova.compute.manager [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1393.119178] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1393.121867] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a1d8b8-f762-48a1-878b-076c3394ec8f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.130879] env[69992]: DEBUG nova.network.neutron [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Successfully updated port: 92f378ec-95f1-4743-9f83-12cb0a249cd0 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1393.137281] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1393.137629] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a89a1260-f441-4ed1-ad3f-ca27eabc8cce {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.144556] env[69992]: DEBUG oslo_concurrency.lockutils [None req-35991062-4d9a-4e87-9083-75d6901ded4d tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "d5a6a189-0a7d-49ba-acab-35a244cf76eb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.225s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1393.145164] env[69992]: DEBUG oslo_vmware.api [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1393.145164] env[69992]: value = "task-2898014" [ 1393.145164] env[69992]: _type = "Task" [ 1393.145164] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.154248] env[69992]: DEBUG oslo_vmware.api [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898014, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.171084] env[69992]: DEBUG nova.network.neutron [req-de2c6e56-73ad-4f86-8866-fc1c59a14959 req-8b64cbce-269e-42da-934b-1930d3078e45 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.285847] env[69992]: DEBUG nova.network.neutron [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Updated VIF entry in instance network info cache for port 0042c1e4-d906-4261-a18e-ce232533cbdd. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1393.286667] env[69992]: DEBUG nova.network.neutron [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Updating instance_info_cache with network_info: [{"id": "0042c1e4-d906-4261-a18e-ce232533cbdd", "address": "fa:16:3e:44:45:52", "network": {"id": "58824cf0-bce0-4f1b-9942-dd68624dd3ff", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1287894269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1471cdd6671b4e6ebc23b8fc2b120b63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap0042c1e4-d9", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.341466] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]520b4c1e-a9bd-3288-9a62-836a59a421fe, 'name': SearchDatastore_Task, 'duration_secs': 0.010976} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.341820] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1393.342148] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 9464339a-b760-47e9-bc75-e88ce18bf71b/9464339a-b760-47e9-bc75-e88ce18bf71b.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1393.342465] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6dbb080f-1f3b-4d92-b5d6-a3040c5e356a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.351718] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1393.351718] env[69992]: value = "task-2898015" [ 1393.351718] env[69992]: _type = "Task" [ 1393.351718] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.360023] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898015, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.548059] env[69992]: DEBUG nova.scheduler.client.report [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1393.551898] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "interface-fe3624b0-7d4a-4a16-83e3-3f28c2a74006-3d571c52-27cf-411e-86f3-279b842e93ca" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1393.552139] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "interface-fe3624b0-7d4a-4a16-83e3-3f28c2a74006-3d571c52-27cf-411e-86f3-279b842e93ca" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1393.558061] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1393.610695] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1393.633793] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquiring lock "refresh_cache-e9018928-5237-4ba1-8c18-9ff1ec64a79c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1393.654915] env[69992]: DEBUG oslo_vmware.api [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898014, 'name': PowerOffVM_Task, 'duration_secs': 0.29898} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.655185] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1393.655356] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1393.655661] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9f6e8623-1c75-44f8-a567-12bc06bddca4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.674129] env[69992]: DEBUG oslo_concurrency.lockutils [req-de2c6e56-73ad-4f86-8866-fc1c59a14959 req-8b64cbce-269e-42da-934b-1930d3078e45 service nova] Releasing lock "refresh_cache-e9018928-5237-4ba1-8c18-9ff1ec64a79c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1393.674558] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquired lock "refresh_cache-e9018928-5237-4ba1-8c18-9ff1ec64a79c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1393.674720] env[69992]: DEBUG nova.network.neutron [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1393.710855] env[69992]: DEBUG nova.compute.manager [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1393.729219] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1393.729382] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1393.729568] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Deleting the datastore file [datastore1] 921f1e1a-6de3-404d-8970-8545db0128f2 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1393.729824] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bbacdf1a-91f3-44bf-b831-5be327e23068 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.734118] env[69992]: DEBUG nova.virt.hardware [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1393.734346] env[69992]: DEBUG nova.virt.hardware [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1393.734570] env[69992]: DEBUG nova.virt.hardware [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1393.734834] env[69992]: DEBUG nova.virt.hardware [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1393.735023] env[69992]: DEBUG nova.virt.hardware [None req-b85d2915-30c3-4a36-a991-23a0feebc61d 
tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1393.735182] env[69992]: DEBUG nova.virt.hardware [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1393.735396] env[69992]: DEBUG nova.virt.hardware [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1393.735559] env[69992]: DEBUG nova.virt.hardware [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1393.735727] env[69992]: DEBUG nova.virt.hardware [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1393.735889] env[69992]: DEBUG nova.virt.hardware [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1393.736075] env[69992]: DEBUG nova.virt.hardware [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1393.736941] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d486916e-15a2-402e-8d23-1dff152594d1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.741474] env[69992]: DEBUG oslo_vmware.api [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1393.741474] env[69992]: value = "task-2898017" [ 1393.741474] env[69992]: _type = "Task" [ 1393.741474] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.748113] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82507dbc-9cf3-4468-8a0a-0b52695c2d04 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.754940] env[69992]: DEBUG oslo_vmware.api [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898017, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.790715] env[69992]: DEBUG oslo_concurrency.lockutils [req-fa7cef55-6596-4e5d-a5c6-e2b93f0806b4 req-70938187-6e88-4ef0-939f-607b9833e215 service nova] Releasing lock "refresh_cache-25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1393.861751] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898015, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.052760] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.365s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1394.053341] env[69992]: DEBUG nova.compute.manager [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1394.056118] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.879s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1394.056361] env[69992]: DEBUG nova.objects.instance [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lazy-loading 'resources' on Instance uuid 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1394.058490] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.058660] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1394.059762] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6381dd34-5314-46f3-adbc-eeb31d62cd64 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.079737] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-321f70fa-be60-4231-b053-0a5dd48133ec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.177473] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Reconfiguring VM to detach interface {{(pid=69992) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1394.177473] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f957a832-7861-49f8-8bfc-4a90b77b4b86 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.177473] env[69992]: DEBUG oslo_vmware.api [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1394.177473] env[69992]: value = "task-2898018" [ 1394.177473] env[69992]: _type = "Task" [ 1394.177473] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.177473] env[69992]: DEBUG oslo_vmware.api [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898018, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.231482] env[69992]: DEBUG nova.network.neutron [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1394.252371] env[69992]: DEBUG oslo_vmware.api [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898017, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.344762} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.252644] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1394.252851] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1394.253011] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1394.253208] env[69992]: INFO nova.compute.manager [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1394.253485] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1394.255870] env[69992]: DEBUG nova.compute.manager [-] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1394.256059] env[69992]: DEBUG nova.network.neutron [-] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1394.350709] env[69992]: DEBUG nova.compute.manager [req-e6dc082f-96f2-41c4-a8f8-58bcc951f16a req-a0e14590-965f-4f6c-ab5b-3645b24ad741 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Received event network-vif-plugged-92f378ec-95f1-4743-9f83-12cb0a249cd0 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1394.350922] env[69992]: DEBUG oslo_concurrency.lockutils [req-e6dc082f-96f2-41c4-a8f8-58bcc951f16a req-a0e14590-965f-4f6c-ab5b-3645b24ad741 service nova] Acquiring lock "e9018928-5237-4ba1-8c18-9ff1ec64a79c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1394.351647] env[69992]: DEBUG oslo_concurrency.lockutils [req-e6dc082f-96f2-41c4-a8f8-58bcc951f16a req-a0e14590-965f-4f6c-ab5b-3645b24ad741 service nova] Lock "e9018928-5237-4ba1-8c18-9ff1ec64a79c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1394.351899] env[69992]: DEBUG oslo_concurrency.lockutils [req-e6dc082f-96f2-41c4-a8f8-58bcc951f16a req-a0e14590-965f-4f6c-ab5b-3645b24ad741 service nova] Lock "e9018928-5237-4ba1-8c18-9ff1ec64a79c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1394.351899] env[69992]: DEBUG nova.compute.manager [req-e6dc082f-96f2-41c4-a8f8-58bcc951f16a req-a0e14590-965f-4f6c-ab5b-3645b24ad741 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] No waiting events found dispatching network-vif-plugged-92f378ec-95f1-4743-9f83-12cb0a249cd0 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1394.352892] env[69992]: WARNING nova.compute.manager [req-e6dc082f-96f2-41c4-a8f8-58bcc951f16a req-a0e14590-965f-4f6c-ab5b-3645b24ad741 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Received unexpected event network-vif-plugged-92f378ec-95f1-4743-9f83-12cb0a249cd0 for instance with vm_state building and task_state spawning. [ 1394.353032] env[69992]: DEBUG nova.compute.manager [req-e6dc082f-96f2-41c4-a8f8-58bcc951f16a req-a0e14590-965f-4f6c-ab5b-3645b24ad741 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Received event network-changed-92f378ec-95f1-4743-9f83-12cb0a249cd0 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1394.353160] env[69992]: DEBUG nova.compute.manager [req-e6dc082f-96f2-41c4-a8f8-58bcc951f16a req-a0e14590-965f-4f6c-ab5b-3645b24ad741 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Refreshing instance network info cache due to event network-changed-92f378ec-95f1-4743-9f83-12cb0a249cd0. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1394.353313] env[69992]: DEBUG oslo_concurrency.lockutils [req-e6dc082f-96f2-41c4-a8f8-58bcc951f16a req-a0e14590-965f-4f6c-ab5b-3645b24ad741 service nova] Acquiring lock "refresh_cache-e9018928-5237-4ba1-8c18-9ff1ec64a79c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.366748] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898015, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.384553] env[69992]: DEBUG nova.network.neutron [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Successfully updated port: e1406cdb-0572-4e8a-9429-723f364d855d {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1394.424757] env[69992]: DEBUG nova.compute.manager [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1394.425920] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e64702d-1a9c-4a93-812f-075b87d0a2c6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.561790] env[69992]: DEBUG nova.compute.utils [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1394.564214] env[69992]: DEBUG nova.compute.manager [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1394.564214] env[69992]: DEBUG nova.network.neutron [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1394.568047] env[69992]: DEBUG nova.objects.instance [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lazy-loading 'numa_topology' on Instance uuid 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1394.604019] env[69992]: DEBUG nova.policy [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '57d2ee1abedf4874bcb44b4076199da6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b8716c4b7324052a3472734c655655a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1394.609955] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1394.610315] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1394.610499] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1394.610654] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1394.610808] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1394.611042] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1394.612223] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69992) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1394.612484] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1394.644449] env[69992]: DEBUG oslo_vmware.api [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898018, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.776530] env[69992]: DEBUG nova.network.neutron [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Updating instance_info_cache with network_info: [{"id": "52bbb2d4-ddca-4b0b-951c-b68eb107fd53", "address": "fa:16:3e:d9:e1:fe", "network": {"id": "93ea3e4f-4a4c-437f-ba33-4050fbc5d506", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2020610766", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.138", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d5c32fe8b254c5abdd4123bd2088353", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52bbb2d4-dd", "ovs_interfaceid": "52bbb2d4-ddca-4b0b-951c-b68eb107fd53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "38bb614b-f887-4e3a-996d-e5b3b1141511", "address": "fa:16:3e:0e:25:f3", "network": {"id": "3f06dd4f-cc91-4623-aeec-65581a280a4d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-681870563", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.78", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8d5c32fe8b254c5abdd4123bd2088353", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38bb614b-f8", "ovs_interfaceid": "38bb614b-f887-4e3a-996d-e5b3b1141511", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "92f378ec-95f1-4743-9f83-12cb0a249cd0", "address": "fa:16:3e:82:85:2a", "network": {"id": "93ea3e4f-4a4c-437f-ba33-4050fbc5d506", "bridge": "br-int", "label": 
"tempest-ServersTestMultiNic-2020610766", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.75", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d5c32fe8b254c5abdd4123bd2088353", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92f378ec-95", "ovs_interfaceid": "92f378ec-95f1-4743-9f83-12cb0a249cd0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1394.868530] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898015, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.874610] env[69992]: DEBUG nova.network.neutron [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Successfully created port: 0b3a8c02-b431-4538-b679-fba08b7e9e8e {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1394.890533] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Acquiring lock "refresh_cache-fc769b20-222e-4ff0-8ffd-7b24e4658b14" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.890533] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Acquired lock "refresh_cache-fc769b20-222e-4ff0-8ffd-7b24e4658b14" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1394.890533] env[69992]: DEBUG nova.network.neutron [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1394.937143] env[69992]: INFO nova.compute.manager [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] instance snapshotting [ 1394.939810] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5040543-1893-4d4d-92ae-e6ed3bb442e2 {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.961055] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f34d1d4-f945-48cc-b964-14497c1ba451 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.044199] env[69992]: DEBUG nova.network.neutron [-] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1395.067601] env[69992]: DEBUG nova.compute.manager [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1395.070286] env[69992]: DEBUG nova.objects.base [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Object Instance<25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7> lazy-loaded attributes: resources,numa_topology {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1395.117015] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1395.144553] env[69992]: DEBUG oslo_vmware.api [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898018, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.278740] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23a7cba-f01c-47af-8883-914bb512d70d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.282007] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Releasing lock "refresh_cache-e9018928-5237-4ba1-8c18-9ff1ec64a79c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1395.282453] env[69992]: DEBUG nova.compute.manager [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Instance network_info: |[{"id": "52bbb2d4-ddca-4b0b-951c-b68eb107fd53", "address": "fa:16:3e:d9:e1:fe", "network": {"id": "93ea3e4f-4a4c-437f-ba33-4050fbc5d506", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2020610766", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.138", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d5c32fe8b254c5abdd4123bd2088353", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52bbb2d4-dd", "ovs_interfaceid": "52bbb2d4-ddca-4b0b-951c-b68eb107fd53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "38bb614b-f887-4e3a-996d-e5b3b1141511", "address": "fa:16:3e:0e:25:f3", "network": {"id": "3f06dd4f-cc91-4623-aeec-65581a280a4d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-681870563", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.78", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8d5c32fe8b254c5abdd4123bd2088353", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38bb614b-f8", "ovs_interfaceid": "38bb614b-f887-4e3a-996d-e5b3b1141511", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "92f378ec-95f1-4743-9f83-12cb0a249cd0", "address": "fa:16:3e:82:85:2a", "network": {"id": "93ea3e4f-4a4c-437f-ba33-4050fbc5d506", "bridge": "br-int", "label": 
"tempest-ServersTestMultiNic-2020610766", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.75", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d5c32fe8b254c5abdd4123bd2088353", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92f378ec-95", "ovs_interfaceid": "92f378ec-95f1-4743-9f83-12cb0a249cd0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1395.282745] env[69992]: DEBUG oslo_concurrency.lockutils [req-e6dc082f-96f2-41c4-a8f8-58bcc951f16a req-a0e14590-965f-4f6c-ab5b-3645b24ad741 service nova] Acquired lock "refresh_cache-e9018928-5237-4ba1-8c18-9ff1ec64a79c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1395.282961] env[69992]: DEBUG nova.network.neutron [req-e6dc082f-96f2-41c4-a8f8-58bcc951f16a req-a0e14590-965f-4f6c-ab5b-3645b24ad741 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Refreshing network info cache for port 92f378ec-95f1-4743-9f83-12cb0a249cd0 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1395.284158] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:e1:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3ff3baee-99ce-4b51-ae98-efc6163aaab3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '52bbb2d4-ddca-4b0b-951c-b68eb107fd53', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:25:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2be3fdb5-359e-43bd-8c20-2ff00e81db55', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '38bb614b-f887-4e3a-996d-e5b3b1141511', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:85:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3ff3baee-99ce-4b51-ae98-efc6163aaab3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '92f378ec-95f1-4743-9f83-12cb0a249cd0', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1395.296503] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Creating folder: Project (8d5c32fe8b254c5abdd4123bd2088353). Parent ref: group-v581821. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1395.299681] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e657d61e-9d5c-42e3-9e1d-0683cdfaacca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.302396] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e2742c-10a2-4985-94a7-9f8fe828f06f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.336720] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d868f1e6-eb9e-4317-a2cf-dda3576589ab {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.339407] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Created folder: Project (8d5c32fe8b254c5abdd4123bd2088353) in parent group-v581821. [ 1395.339593] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Creating folder: Instances. Parent ref: group-v582117. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1395.339815] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1045a3ea-f0bc-4009-9d2e-8fde71e676ca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.347315] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98008eb3-f42f-49f6-8ff3-62dbba459c5e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.352014] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Created folder: Instances in parent group-v582117. [ 1395.352243] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1395.352773] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1395.352961] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b18390b-ceeb-4ef7-95ee-c8b44123d560 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.381050] env[69992]: DEBUG nova.compute.provider_tree [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1395.394018] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898015, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.748381} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.394018] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 9464339a-b760-47e9-bc75-e88ce18bf71b/9464339a-b760-47e9-bc75-e88ce18bf71b.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1395.394018] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1395.394561] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1395.394561] env[69992]: value = "task-2898021" [ 1395.394561] env[69992]: _type = "Task" [ 1395.394561] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.394971] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e0cdedf-ffc0-4a2d-8a96-7ab9c9430c0e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.405758] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898021, 'name': CreateVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.409020] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1395.409020] env[69992]: value = "task-2898022" [ 1395.409020] env[69992]: _type = "Task" [ 1395.409020] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.415547] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898022, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.441401] env[69992]: DEBUG nova.network.neutron [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1395.471719] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Creating Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1395.472042] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c8fe357a-a351-46f5-9377-4648d003445d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.480597] env[69992]: DEBUG oslo_vmware.api [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1395.480597] env[69992]: value = "task-2898023" [ 1395.480597] env[69992]: _type = "Task" [ 1395.480597] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.488896] env[69992]: DEBUG oslo_vmware.api [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898023, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.547157] env[69992]: INFO nova.compute.manager [-] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Took 1.29 seconds to deallocate network for instance. 
[ 1395.620530] env[69992]: DEBUG nova.network.neutron [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Updating instance_info_cache with network_info: [{"id": "e1406cdb-0572-4e8a-9429-723f364d855d", "address": "fa:16:3e:12:ea:2e", "network": {"id": "de561a81-eb9d-486f-9650-de226deb459d", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-928883628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87041ec260a488c9162018da7f5a2ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1406cdb-05", "ovs_interfaceid": "e1406cdb-0572-4e8a-9429-723f364d855d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1395.644450] env[69992]: DEBUG oslo_vmware.api [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898018, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.883852] env[69992]: DEBUG nova.scheduler.client.report [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1395.906868] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898021, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.917508] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898022, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069659} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.920974] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1395.922487] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c442ec5d-a793-46a8-8b90-5cf9904dc30c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.948106] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] 9464339a-b760-47e9-bc75-e88ce18bf71b/9464339a-b760-47e9-bc75-e88ce18bf71b.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1395.948458] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73c6f1fd-7c55-4f4b-bd95-860de30a12e8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.969188] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1395.969188] env[69992]: value = "task-2898024" [ 1395.969188] env[69992]: _type = "Task" [ 1395.969188] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.977727] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898024, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.989467] env[69992]: DEBUG oslo_vmware.api [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898023, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.057071] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1396.079744] env[69992]: DEBUG nova.compute.manager [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1396.109774] env[69992]: DEBUG nova.virt.hardware [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1396.110036] env[69992]: DEBUG nova.virt.hardware [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1396.110302] env[69992]: DEBUG nova.virt.hardware [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1396.110493] env[69992]: DEBUG nova.virt.hardware [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1396.110643] env[69992]: DEBUG nova.virt.hardware [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1396.110791] env[69992]: DEBUG nova.virt.hardware [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1396.111009] env[69992]: DEBUG nova.virt.hardware [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1396.111210] env[69992]: DEBUG nova.virt.hardware [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1396.111402] env[69992]: DEBUG nova.virt.hardware [None 
req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1396.111571] env[69992]: DEBUG nova.virt.hardware [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1396.111797] env[69992]: DEBUG nova.virt.hardware [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1396.112744] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-215a69d0-8f4a-4e7e-9fdd-46944bcc0c11 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.121703] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d77e80d4-5211-4876-91e7-3bf958b48317 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.126755] env[69992]: DEBUG nova.network.neutron [req-e6dc082f-96f2-41c4-a8f8-58bcc951f16a req-a0e14590-965f-4f6c-ab5b-3645b24ad741 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Updated VIF entry in instance network info cache for port 92f378ec-95f1-4743-9f83-12cb0a249cd0. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1396.127316] env[69992]: DEBUG nova.network.neutron [req-e6dc082f-96f2-41c4-a8f8-58bcc951f16a req-a0e14590-965f-4f6c-ab5b-3645b24ad741 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Updating instance_info_cache with network_info: [{"id": "52bbb2d4-ddca-4b0b-951c-b68eb107fd53", "address": "fa:16:3e:d9:e1:fe", "network": {"id": "93ea3e4f-4a4c-437f-ba33-4050fbc5d506", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2020610766", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.138", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d5c32fe8b254c5abdd4123bd2088353", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52bbb2d4-dd", "ovs_interfaceid": "52bbb2d4-ddca-4b0b-951c-b68eb107fd53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "38bb614b-f887-4e3a-996d-e5b3b1141511", "address": "fa:16:3e:0e:25:f3", "network": {"id": "3f06dd4f-cc91-4623-aeec-65581a280a4d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-681870563", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.78", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8d5c32fe8b254c5abdd4123bd2088353", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38bb614b-f8", "ovs_interfaceid": "38bb614b-f887-4e3a-996d-e5b3b1141511", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "92f378ec-95f1-4743-9f83-12cb0a249cd0", "address": "fa:16:3e:82:85:2a", "network": {"id": "93ea3e4f-4a4c-437f-ba33-4050fbc5d506", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2020610766", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.75", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d5c32fe8b254c5abdd4123bd2088353", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", 
"segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92f378ec-95", "ovs_interfaceid": "92f378ec-95f1-4743-9f83-12cb0a249cd0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1396.128721] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Releasing lock "refresh_cache-fc769b20-222e-4ff0-8ffd-7b24e4658b14" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1396.128981] env[69992]: DEBUG nova.compute.manager [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Instance network_info: |[{"id": "e1406cdb-0572-4e8a-9429-723f364d855d", "address": "fa:16:3e:12:ea:2e", "network": {"id": "de561a81-eb9d-486f-9650-de226deb459d", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-928883628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87041ec260a488c9162018da7f5a2ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1406cdb-05", "ovs_interfaceid": "e1406cdb-0572-4e8a-9429-723f364d855d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1396.129363] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:ea:2e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae18b41f-e73c-44f1-83dd-467c080944f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e1406cdb-0572-4e8a-9429-723f364d855d', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1396.136937] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Creating folder: Project (e87041ec260a488c9162018da7f5a2ce). Parent ref: group-v581821. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1396.138033] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0da8ff21-d22e-4d2b-ae63-6185594a84bd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.157119] env[69992]: DEBUG oslo_vmware.api [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898018, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.160853] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Created folder: Project (e87041ec260a488c9162018da7f5a2ce) in parent group-v581821. [ 1396.161054] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Creating folder: Instances. Parent ref: group-v582120. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1396.161531] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6e259444-28a6-417c-93f1-65cf285c4687 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.171134] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Created folder: Instances in parent group-v582120. [ 1396.171408] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1396.171621] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1396.171842] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4e4482e2-5ab6-4dce-9af9-59d0e9bd0364 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.191149] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1396.191149] env[69992]: value = "task-2898027" [ 1396.191149] env[69992]: _type = "Task" [ 1396.191149] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.200453] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898027, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.353872] env[69992]: DEBUG nova.network.neutron [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Successfully updated port: 0b3a8c02-b431-4538-b679-fba08b7e9e8e {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1396.388189] env[69992]: DEBUG nova.compute.manager [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Received event network-changed-e1406cdb-0572-4e8a-9429-723f364d855d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1396.388472] env[69992]: DEBUG nova.compute.manager [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Refreshing instance network info cache due to event network-changed-e1406cdb-0572-4e8a-9429-723f364d855d. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1396.388741] env[69992]: DEBUG oslo_concurrency.lockutils [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] Acquiring lock "refresh_cache-fc769b20-222e-4ff0-8ffd-7b24e4658b14" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.388940] env[69992]: DEBUG oslo_concurrency.lockutils [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] Acquired lock "refresh_cache-fc769b20-222e-4ff0-8ffd-7b24e4658b14" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1396.389175] env[69992]: DEBUG nova.network.neutron [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Refreshing network info cache for port e1406cdb-0572-4e8a-9429-723f364d855d {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1396.391655] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.336s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1396.394478] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.836s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1396.396064] env[69992]: INFO nova.compute.claims [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1396.411497] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898021, 'name': CreateVM_Task, 'duration_secs': 
0.570419} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.411497] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1396.411497] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.411497] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1396.411497] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1396.411497] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e4a8375-acdf-4f52-9af1-18f76445e04c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.416173] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for the task: (returnval){ [ 1396.416173] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5267cb3c-b6ea-bf74-6eaf-1c3849c323af" [ 1396.416173] env[69992]: _type = "Task" [ 1396.416173] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.424123] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5267cb3c-b6ea-bf74-6eaf-1c3849c323af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.478966] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898024, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.490326] env[69992]: DEBUG oslo_vmware.api [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898023, 'name': CreateSnapshot_Task, 'duration_secs': 0.77925} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.490613] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Created Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1396.491517] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d12b1d5f-f9ea-494a-a025-36ef58e15530 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.639196] env[69992]: DEBUG oslo_concurrency.lockutils [req-e6dc082f-96f2-41c4-a8f8-58bcc951f16a req-a0e14590-965f-4f6c-ab5b-3645b24ad741 service nova] Releasing lock "refresh_cache-e9018928-5237-4ba1-8c18-9ff1ec64a79c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1396.639508] env[69992]: DEBUG nova.compute.manager [req-e6dc082f-96f2-41c4-a8f8-58bcc951f16a req-a0e14590-965f-4f6c-ab5b-3645b24ad741 service nova] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Received event network-vif-plugged-e1406cdb-0572-4e8a-9429-723f364d855d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1396.639712] env[69992]: DEBUG oslo_concurrency.lockutils [req-e6dc082f-96f2-41c4-a8f8-58bcc951f16a req-a0e14590-965f-4f6c-ab5b-3645b24ad741 service nova] Acquiring lock "fc769b20-222e-4ff0-8ffd-7b24e4658b14-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1396.639938] env[69992]: DEBUG oslo_concurrency.lockutils [req-e6dc082f-96f2-41c4-a8f8-58bcc951f16a req-a0e14590-965f-4f6c-ab5b-3645b24ad741 service nova] Lock "fc769b20-222e-4ff0-8ffd-7b24e4658b14-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1396.640128] env[69992]: DEBUG oslo_concurrency.lockutils [req-e6dc082f-96f2-41c4-a8f8-58bcc951f16a req-a0e14590-965f-4f6c-ab5b-3645b24ad741 service nova] Lock "fc769b20-222e-4ff0-8ffd-7b24e4658b14-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1396.640344] env[69992]: DEBUG nova.compute.manager [req-e6dc082f-96f2-41c4-a8f8-58bcc951f16a req-a0e14590-965f-4f6c-ab5b-3645b24ad741 service nova] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] No waiting events found dispatching network-vif-plugged-e1406cdb-0572-4e8a-9429-723f364d855d {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1396.640557] env[69992]: WARNING nova.compute.manager [req-e6dc082f-96f2-41c4-a8f8-58bcc951f16a req-a0e14590-965f-4f6c-ab5b-3645b24ad741 service nova] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Received unexpected event network-vif-plugged-e1406cdb-0572-4e8a-9429-723f364d855d for instance with vm_state building and task_state spawning. 
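The surrounding entries trace the vSphere side of these builds: a task is started (Folder.CreateVM_Task, CloneVM_Task, ReconfigVM_Task, SearchDatastore_Task), wait_for_task registers the returned task reference (e.g. task-2898027), and _poll_task re-reads its progress (0%, 25%, 94%, ...) until it reports success. The following is only a schematic sketch of that poll-until-done loop, not Nova's or oslo.vmware's actual code; the poll_task callback is a hypothetical stand-in for the RetrievePropertiesEx round-trip each iteration performs.

    import time

    def wait_for_task(poll_task, interval=0.5):
        # Schematic only: the real loop lives in oslo_vmware.api
        # (wait_for_task / _poll_task) and additionally handles retries
        # and error details from the task info.
        while True:
            state, progress = poll_task()      # one status read per iteration
            if state == 'success':
                return
            if state == 'error':
                raise RuntimeError('vSphere task failed')
            time.sleep(interval)               # source of the periodic "progress is N%" lines

In the real session the interval is a configuration value (oslo.vmware's task_poll_interval) rather than a hard-coded sleep.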
[ 1396.650028] env[69992]: DEBUG oslo_vmware.api [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898018, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.702936] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898027, 'name': CreateVM_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.856948] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "refresh_cache-d50d7460-2b70-45bc-940f-7d45f329fa1c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.857132] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired lock "refresh_cache-d50d7460-2b70-45bc-940f-7d45f329fa1c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1396.857297] env[69992]: DEBUG nova.network.neutron [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1396.902487] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3cfe74db-af56-4a71-bfba-846798c9af40 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 26.178s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1396.904285] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 3.293s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1396.904285] env[69992]: INFO nova.compute.manager [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Unshelving [ 1396.927432] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5267cb3c-b6ea-bf74-6eaf-1c3849c323af, 'name': SearchDatastore_Task, 'duration_secs': 0.014995} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.927432] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1396.927657] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1396.927794] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.927943] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1396.928144] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1396.928401] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3be1cc8-ea94-4594-9fc2-3d84b94c7abb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.941910] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1396.942103] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1396.942860] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bc8fb00-6710-4b9d-8e31-06eb7792bbab {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.948297] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for the task: (returnval){ [ 1396.948297] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52c09000-ce91-4a5d-71fc-3dc36f66bc0f" [ 1396.948297] env[69992]: _type = "Task" [ 1396.948297] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.955526] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c09000-ce91-4a5d-71fc-3dc36f66bc0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.980053] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898024, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.009531] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Creating linked-clone VM from snapshot {{(pid=69992) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1397.009858] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-18555b5a-55ea-49dc-9710-073305deb28a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.021574] env[69992]: DEBUG oslo_vmware.api [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1397.021574] env[69992]: value = "task-2898028" [ 1397.021574] env[69992]: _type = "Task" [ 1397.021574] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.031012] env[69992]: DEBUG oslo_vmware.api [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898028, 'name': CloneVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.108396] env[69992]: DEBUG nova.network.neutron [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Updated VIF entry in instance network info cache for port e1406cdb-0572-4e8a-9429-723f364d855d. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1397.108771] env[69992]: DEBUG nova.network.neutron [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Updating instance_info_cache with network_info: [{"id": "e1406cdb-0572-4e8a-9429-723f364d855d", "address": "fa:16:3e:12:ea:2e", "network": {"id": "de561a81-eb9d-486f-9650-de226deb459d", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-928883628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87041ec260a488c9162018da7f5a2ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1406cdb-05", "ovs_interfaceid": "e1406cdb-0572-4e8a-9429-723f364d855d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.153602] env[69992]: DEBUG oslo_vmware.api [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898018, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.202393] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898027, 'name': CreateVM_Task, 'duration_secs': 0.608636} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.202602] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1397.203285] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.203478] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1397.203805] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1397.204083] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a264e1ad-cfbb-4963-a0f0-11efb14e3931 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.208811] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Waiting for the task: (returnval){ [ 1397.208811] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52c6e39b-a681-a31b-4418-8c25b6bca03e" [ 1397.208811] env[69992]: _type = "Task" [ 1397.208811] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.218610] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c6e39b-a681-a31b-4418-8c25b6bca03e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.398017] env[69992]: DEBUG nova.network.neutron [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1397.458387] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c09000-ce91-4a5d-71fc-3dc36f66bc0f, 'name': SearchDatastore_Task, 'duration_secs': 0.015319} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.459262] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c0d539d-f86c-45e8-96ae-394dd988a651 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.465619] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for the task: (returnval){ [ 1397.465619] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52950e88-6117-65d5-7add-24b81f30e6b9" [ 1397.465619] env[69992]: _type = "Task" [ 1397.465619] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.478702] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52950e88-6117-65d5-7add-24b81f30e6b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.483771] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898024, 'name': ReconfigVM_Task, 'duration_secs': 1.391763} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.484048] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Reconfigured VM instance instance-00000061 to attach disk [datastore2] 9464339a-b760-47e9-bc75-e88ce18bf71b/9464339a-b760-47e9-bc75-e88ce18bf71b.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1397.485166] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'size': 0, 'boot_index': 0, 'encryption_options': None, 'disk_bus': None, 'device_type': 'disk', 'guest_format': None, 'encryption_secret_uuid': None, 'encryption_format': None, 'encrypted': False, 'device_name': '/dev/sda', 'image_id': 'eb50549f-9db8-4c15-a738-0e4b1e9e33fb'}], 'ephemerals': [], 'block_device_mapping': [{'boot_index': None, 'disk_bus': None, 'device_type': None, 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582109', 'volume_id': '38a23a44-927a-49f0-af50-0d71be5adb30', 'name': 'volume-38a23a44-927a-49f0-af50-0d71be5adb30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9464339a-b760-47e9-bc75-e88ce18bf71b', 'attached_at': '', 'detached_at': '', 'volume_id': '38a23a44-927a-49f0-af50-0d71be5adb30', 'serial': '38a23a44-927a-49f0-af50-0d71be5adb30'}, 'attachment_id': 
'ad0425e6-68e9-4ff8-acb7-8124ab71aee6', 'delete_on_termination': False, 'mount_device': '/dev/sdb', 'volume_type': None}], 'swap': None} {{(pid=69992) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1397.485366] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Volume attach. Driver type: vmdk {{(pid=69992) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1397.485559] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582109', 'volume_id': '38a23a44-927a-49f0-af50-0d71be5adb30', 'name': 'volume-38a23a44-927a-49f0-af50-0d71be5adb30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9464339a-b760-47e9-bc75-e88ce18bf71b', 'attached_at': '', 'detached_at': '', 'volume_id': '38a23a44-927a-49f0-af50-0d71be5adb30', 'serial': '38a23a44-927a-49f0-af50-0d71be5adb30'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1397.486352] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf68d50-e3f8-4c19-9972-7ab6bbb98c32 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.501441] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2cd0d64-ed04-42d1-ab5e-0be3579306d7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.527921] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] volume-38a23a44-927a-49f0-af50-0d71be5adb30/volume-38a23a44-927a-49f0-af50-0d71be5adb30.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1397.530682] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1e333e1-3ec0-4b62-a6a2-4dd870354f66 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.551779] env[69992]: DEBUG oslo_vmware.api [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898028, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.552744] env[69992]: DEBUG nova.network.neutron [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Updating instance_info_cache with network_info: [{"id": "0b3a8c02-b431-4538-b679-fba08b7e9e8e", "address": "fa:16:3e:a1:2b:47", "network": {"id": "ef950ccf-7246-4fba-b1d2-5829e51d7f7d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093076994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b8716c4b7324052a3472734c655655a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b3a8c02-b4", "ovs_interfaceid": "0b3a8c02-b431-4538-b679-fba08b7e9e8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.555463] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1397.555463] env[69992]: value = "task-2898029" [ 1397.555463] env[69992]: _type = "Task" [ 1397.555463] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.565589] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898029, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.612059] env[69992]: DEBUG oslo_concurrency.lockutils [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] Releasing lock "refresh_cache-fc769b20-222e-4ff0-8ffd-7b24e4658b14" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1397.612388] env[69992]: DEBUG nova.compute.manager [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Received event network-vif-deleted-8d35e214-0207-4c75-9f49-da956de6db36 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1397.612388] env[69992]: DEBUG nova.compute.manager [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Received event network-vif-plugged-0b3a8c02-b431-4538-b679-fba08b7e9e8e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1397.612632] env[69992]: DEBUG oslo_concurrency.lockutils [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] Acquiring lock "d50d7460-2b70-45bc-940f-7d45f329fa1c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1397.612889] env[69992]: DEBUG oslo_concurrency.lockutils [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] Lock "d50d7460-2b70-45bc-940f-7d45f329fa1c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1397.613112] env[69992]: DEBUG oslo_concurrency.lockutils [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] Lock "d50d7460-2b70-45bc-940f-7d45f329fa1c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1397.613324] env[69992]: DEBUG nova.compute.manager [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] No waiting events found dispatching network-vif-plugged-0b3a8c02-b431-4538-b679-fba08b7e9e8e {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1397.613565] env[69992]: WARNING nova.compute.manager [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Received unexpected event network-vif-plugged-0b3a8c02-b431-4538-b679-fba08b7e9e8e for instance with vm_state building and task_state spawning. 
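Throughout this stretch the same oslo.concurrency locking pattern repeats: a named lock is acquired (the log records how long the caller waited), the critical section runs, and the release line reports how long the lock was held, for names such as "refresh_cache-<instance uuid>", "compute_resources", and "<instance uuid>-events". A brief illustrative sketch of the two forms that produce these messages, with lock names copied from the log; these are not the actual Nova call sites.

    from oslo_concurrency import lockutils

    # Context-manager form: produces the "Acquiring lock" / "Acquired lock" /
    # "Releasing lock" lines seen for the refresh_cache-* locks.
    with lockutils.lock('refresh_cache-d50d7460-2b70-45bc-940f-7d45f329fa1c'):
        pass  # e.g. rebuild the instance network info cache while holding the lock

    # Decorator form: produces the 'acquired by "..." :: waited' and
    # 'released by "..." :: held' lines seen for "compute_resources".
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass  # resource-tracker work guarded by the shared lock name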
[ 1397.613773] env[69992]: DEBUG nova.compute.manager [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Received event network-changed-0b3a8c02-b431-4538-b679-fba08b7e9e8e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1397.613978] env[69992]: DEBUG nova.compute.manager [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Refreshing instance network info cache due to event network-changed-0b3a8c02-b431-4538-b679-fba08b7e9e8e. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1397.614219] env[69992]: DEBUG oslo_concurrency.lockutils [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] Acquiring lock "refresh_cache-d50d7460-2b70-45bc-940f-7d45f329fa1c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.651493] env[69992]: DEBUG oslo_vmware.api [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898018, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.722030] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c6e39b-a681-a31b-4418-8c25b6bca03e, 'name': SearchDatastore_Task, 'duration_secs': 0.009426} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.722030] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1397.722275] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1397.722445] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.723032] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1397.723032] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1397.723237] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3bc01dda-d13e-41ef-9397-8d76b8a8363d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.726749] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b26f25a1-26b2-4314-9078-9fef873d773d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.735941] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8084b658-80a9-448d-9a65-13538ed22116 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.741750] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1397.741965] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1397.742968] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36a55a04-39f4-46d5-92b0-163d1aef143b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.775244] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f61352ec-9fe0-4f3f-ba5b-3d8b70a08596 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.778042] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Waiting for the task: (returnval){ [ 1397.778042] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52eaa904-b1e7-dd38-99fd-f051c02b013f" [ 1397.778042] env[69992]: _type = "Task" [ 1397.778042] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.784454] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd9ee54c-6d38-4fc2-8f96-53c48184f2dc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.791117] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52eaa904-b1e7-dd38-99fd-f051c02b013f, 'name': SearchDatastore_Task, 'duration_secs': 0.010651} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.792134] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec40d151-25ec-414f-bfe1-a7d71b9aef8d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.801644] env[69992]: DEBUG nova.compute.provider_tree [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1397.805753] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Waiting for the task: (returnval){ [ 1397.805753] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5286100a-a115-6573-5d6b-1417c84ebabd" [ 1397.805753] env[69992]: _type = "Task" [ 1397.805753] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.812626] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5286100a-a115-6573-5d6b-1417c84ebabd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.928335] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1397.975449] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52950e88-6117-65d5-7add-24b81f30e6b9, 'name': SearchDatastore_Task, 'duration_secs': 0.012277} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.975695] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1397.975951] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] e9018928-5237-4ba1-8c18-9ff1ec64a79c/e9018928-5237-4ba1-8c18-9ff1ec64a79c.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1397.976214] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5e5460cc-ca5b-48e7-b1ba-70e2aeff4211 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.983147] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for the task: (returnval){ [ 1397.983147] env[69992]: value = "task-2898030" [ 1397.983147] env[69992]: _type = "Task" [ 1397.983147] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.990671] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898030, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.038980] env[69992]: DEBUG oslo_vmware.api [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898028, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.056670] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Releasing lock "refresh_cache-d50d7460-2b70-45bc-940f-7d45f329fa1c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1398.056992] env[69992]: DEBUG nova.compute.manager [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Instance network_info: |[{"id": "0b3a8c02-b431-4538-b679-fba08b7e9e8e", "address": "fa:16:3e:a1:2b:47", "network": {"id": "ef950ccf-7246-4fba-b1d2-5829e51d7f7d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093076994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b8716c4b7324052a3472734c655655a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b3a8c02-b4", "ovs_interfaceid": "0b3a8c02-b431-4538-b679-fba08b7e9e8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1398.057324] env[69992]: DEBUG oslo_concurrency.lockutils [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] Acquired lock "refresh_cache-d50d7460-2b70-45bc-940f-7d45f329fa1c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1398.057516] env[69992]: DEBUG nova.network.neutron [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Refreshing network info cache for port 0b3a8c02-b431-4538-b679-fba08b7e9e8e {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1398.058800] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:2b:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ed4797-90ad-44cd-bbcb-e90b2a8400f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0b3a8c02-b431-4538-b679-fba08b7e9e8e', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1398.066785] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 
tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1398.069930] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1398.075229] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a4f3553-1a0d-434d-b59c-cf8b48b99a9f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.094820] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898029, 'name': ReconfigVM_Task, 'duration_secs': 0.304945} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.097324] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Reconfigured VM instance instance-00000061 to attach disk [datastore1] volume-38a23a44-927a-49f0-af50-0d71be5adb30/volume-38a23a44-927a-49f0-af50-0d71be5adb30.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1398.101890] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1398.101890] env[69992]: value = "task-2898031" [ 1398.101890] env[69992]: _type = "Task" [ 1398.101890] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.102111] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa2144ac-16e9-4493-8b13-303e7b3fc5bf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.121790] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898031, 'name': CreateVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.123038] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1398.123038] env[69992]: value = "task-2898032" [ 1398.123038] env[69992]: _type = "Task" [ 1398.123038] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.130684] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898032, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.152631] env[69992]: DEBUG oslo_vmware.api [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898018, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.305122] env[69992]: DEBUG nova.scheduler.client.report [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1398.321760] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5286100a-a115-6573-5d6b-1417c84ebabd, 'name': SearchDatastore_Task, 'duration_secs': 0.009371} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.323684] env[69992]: DEBUG nova.network.neutron [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Updated VIF entry in instance network info cache for port 0b3a8c02-b431-4538-b679-fba08b7e9e8e. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1398.324088] env[69992]: DEBUG nova.network.neutron [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Updating instance_info_cache with network_info: [{"id": "0b3a8c02-b431-4538-b679-fba08b7e9e8e", "address": "fa:16:3e:a1:2b:47", "network": {"id": "ef950ccf-7246-4fba-b1d2-5829e51d7f7d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093076994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b8716c4b7324052a3472734c655655a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b3a8c02-b4", "ovs_interfaceid": "0b3a8c02-b431-4538-b679-fba08b7e9e8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.325523] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1398.325799] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] fc769b20-222e-4ff0-8ffd-7b24e4658b14/fc769b20-222e-4ff0-8ffd-7b24e4658b14.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1398.326582] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a8fba077-b929-45ac-9775-3ca961adcafc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.336019] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Waiting for the task: (returnval){ [ 1398.336019] env[69992]: value = "task-2898033" [ 1398.336019] env[69992]: _type = "Task" [ 1398.336019] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.346228] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Task: {'id': task-2898033, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.495557] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898030, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.541983] env[69992]: DEBUG oslo_vmware.api [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898028, 'name': CloneVM_Task} progress is 95%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.621413] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898031, 'name': CreateVM_Task, 'duration_secs': 0.365698} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.625018] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1398.625018] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1398.625018] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1398.625018] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1398.625018] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c756edc7-1a83-4aa6-8580-524e331753be {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.631257] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1398.631257] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5225a961-9f9d-215b-65a2-298269acc5aa" [ 1398.631257] 
env[69992]: _type = "Task" [ 1398.631257] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.637175] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898032, 'name': ReconfigVM_Task, 'duration_secs': 0.198805} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.637995] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582109', 'volume_id': '38a23a44-927a-49f0-af50-0d71be5adb30', 'name': 'volume-38a23a44-927a-49f0-af50-0d71be5adb30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9464339a-b760-47e9-bc75-e88ce18bf71b', 'attached_at': '', 'detached_at': '', 'volume_id': '38a23a44-927a-49f0-af50-0d71be5adb30', 'serial': '38a23a44-927a-49f0-af50-0d71be5adb30'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1398.638822] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d0a5d0fb-f4d9-4eab-bbd8-36ca0d2b3e9a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.643616] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5225a961-9f9d-215b-65a2-298269acc5aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.651454] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1398.651454] env[69992]: value = "task-2898034" [ 1398.651454] env[69992]: _type = "Task" [ 1398.651454] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.657954] env[69992]: DEBUG oslo_vmware.api [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898018, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.665879] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898034, 'name': Rename_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.814591] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.420s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1398.815271] env[69992]: DEBUG nova.compute.manager [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1398.818394] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.701s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1398.818533] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1398.819267] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69992) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1398.819267] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.765s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1398.819267] env[69992]: DEBUG nova.objects.instance [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lazy-loading 'resources' on Instance uuid 921f1e1a-6de3-404d-8970-8545db0128f2 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1398.822294] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5606acae-bb2e-4e61-a902-ed2bd3ce7f85 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.827770] env[69992]: DEBUG oslo_concurrency.lockutils [req-eb074d7c-fab7-4725-8730-68affd9ae187 req-b7397f84-8acc-4f5d-bc07-07496b853b2b service nova] Releasing lock "refresh_cache-d50d7460-2b70-45bc-940f-7d45f329fa1c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1398.832222] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a3bb8c-e5f4-4b76-a906-c7aa8c7d604d {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.861843] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6612e8f-27c5-46ac-a3e6-7d94bd5bc9b6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.866022] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Task: {'id': task-2898033, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.871843] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-774f44b1-e7d1-4dd3-823b-49a397b204b3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.906584] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179485MB free_disk=161GB free_vcpus=48 pci_devices=None {{(pid=69992) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1398.906761] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1398.996390] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898030, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.560643} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.996643] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] e9018928-5237-4ba1-8c18-9ff1ec64a79c/e9018928-5237-4ba1-8c18-9ff1ec64a79c.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1398.996862] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1398.997141] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6412267c-b2c3-4a2e-a5ec-c987016c255b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.003861] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for the task: (returnval){ [ 1399.003861] env[69992]: value = "task-2898035" [ 1399.003861] env[69992]: _type = "Task" [ 1399.003861] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.012509] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898035, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.041965] env[69992]: DEBUG oslo_vmware.api [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898028, 'name': CloneVM_Task, 'duration_secs': 1.646767} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.042385] env[69992]: INFO nova.virt.vmwareapi.vmops [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Created linked-clone VM from snapshot [ 1399.043500] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1653507a-0a41-4949-b95d-f7aa8a34aa30 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.052887] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Uploading image 859422ea-3735-48a2-9334-7b77fbf9fdd8 {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1399.067589] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Destroying the VM {{(pid=69992) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1399.067936] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7cac6f60-f348-49eb-86a3-e3afe97b3f91 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.074418] env[69992]: DEBUG oslo_vmware.api [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1399.074418] env[69992]: value = "task-2898036" [ 1399.074418] env[69992]: _type = "Task" [ 1399.074418] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.084073] env[69992]: DEBUG oslo_vmware.api [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898036, 'name': Destroy_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.140409] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5225a961-9f9d-215b-65a2-298269acc5aa, 'name': SearchDatastore_Task, 'duration_secs': 0.01099} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.140754] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1399.140990] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1399.141242] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.141393] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1399.141575] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1399.141835] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f758af5a-3101-4dff-843d-14e64f4d1fa8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.151910] env[69992]: DEBUG oslo_vmware.api [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898018, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.155581] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1399.155758] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1399.156757] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3ff0e5e-a780-4b2f-b179-f9c03f98cb42 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.163755] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898034, 'name': Rename_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.166991] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1399.166991] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d28003-6bfe-6bc1-4156-d3b5cd2b8f9d" [ 1399.166991] env[69992]: _type = "Task" [ 1399.166991] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.174534] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d28003-6bfe-6bc1-4156-d3b5cd2b8f9d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.321959] env[69992]: DEBUG nova.compute.utils [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1399.323393] env[69992]: DEBUG nova.compute.manager [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1399.323576] env[69992]: DEBUG nova.network.neutron [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1399.350146] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Task: {'id': task-2898033, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.688301} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.350458] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] fc769b20-222e-4ff0-8ffd-7b24e4658b14/fc769b20-222e-4ff0-8ffd-7b24e4658b14.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1399.350698] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1399.350953] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b5e7ec83-b428-4955-9b1c-13e92f380890 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.359122] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Waiting for the task: (returnval){ [ 1399.359122] env[69992]: value = "task-2898037" [ 1399.359122] env[69992]: _type = "Task" [ 1399.359122] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.363192] env[69992]: DEBUG nova.policy [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9e7acd70754b4b5d966bcc0662b9a2e8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca458056b0794b08b812f0a4106a448c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1399.369252] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Task: {'id': task-2898037, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.519918] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898035, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060229} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.519918] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1399.520503] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263bb7dd-11cb-4882-8850-c0435afeb5d1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.547872] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] e9018928-5237-4ba1-8c18-9ff1ec64a79c/e9018928-5237-4ba1-8c18-9ff1ec64a79c.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1399.549181] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b84e5b58-567e-4c19-bd01-dda078179dbf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.564320] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7261939c-5530-484f-a51c-99324a61d781 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.572189] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f63bb9-7d7f-4b10-868c-436c816ff8ec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.575415] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for the task: (returnval){ [ 1399.575415] env[69992]: value = "task-2898038" [ 1399.575415] env[69992]: _type = "Task" [ 1399.575415] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.614939] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4225470-56cc-41af-9a22-9cb6de41242f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.617145] env[69992]: DEBUG oslo_vmware.api [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898036, 'name': Destroy_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.617385] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898038, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.622214] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2810fa39-4404-4c14-b084-44c152988c85 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.636311] env[69992]: DEBUG nova.compute.provider_tree [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1399.653975] env[69992]: DEBUG oslo_vmware.api [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898018, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.654904] env[69992]: DEBUG nova.network.neutron [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Successfully created port: 4d2794aa-7eaf-404a-bf09-16cf3c357511 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1399.664977] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898034, 'name': Rename_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.676573] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d28003-6bfe-6bc1-4156-d3b5cd2b8f9d, 'name': SearchDatastore_Task, 'duration_secs': 0.087639} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.677402] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb63d60a-1cee-4d34-bc29-505728a8538d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.682762] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1399.682762] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52b0e8ba-2d7e-7abf-c341-a2a34f56da8d" [ 1399.682762] env[69992]: _type = "Task" [ 1399.682762] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.690578] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b0e8ba-2d7e-7abf-c341-a2a34f56da8d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.827672] env[69992]: DEBUG nova.compute.manager [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1399.869477] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Task: {'id': task-2898037, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.232825} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.869805] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1399.870683] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c32ae13-de1c-4661-b038-14348624f619 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.893987] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] fc769b20-222e-4ff0-8ffd-7b24e4658b14/fc769b20-222e-4ff0-8ffd-7b24e4658b14.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1399.895073] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c0bff8c-4fba-4c0e-ab93-2a106a7a6d11 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.913890] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Waiting for the task: (returnval){ [ 1399.913890] env[69992]: value = "task-2898039" [ 1399.913890] env[69992]: _type = "Task" [ 1399.913890] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.922222] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Task: {'id': task-2898039, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.089844] env[69992]: DEBUG oslo_vmware.api [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898036, 'name': Destroy_Task, 'duration_secs': 0.982239} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.092999] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Destroyed the VM [ 1400.093265] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Deleting Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1400.093569] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898038, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.093854] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6d7c30e8-420c-4418-9629-9424da711227 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.099968] env[69992]: DEBUG oslo_vmware.api [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1400.099968] env[69992]: value = "task-2898040" [ 1400.099968] env[69992]: _type = "Task" [ 1400.099968] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.107940] env[69992]: DEBUG oslo_vmware.api [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898040, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.139673] env[69992]: DEBUG nova.scheduler.client.report [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1400.153816] env[69992]: DEBUG oslo_vmware.api [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898018, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.163161] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898034, 'name': Rename_Task, 'duration_secs': 1.263761} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.163485] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1400.163756] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0152f091-9839-4673-8c24-c6d7bea635bd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.169896] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1400.169896] env[69992]: value = "task-2898041" [ 1400.169896] env[69992]: _type = "Task" [ 1400.169896] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.178356] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898041, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.192790] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b0e8ba-2d7e-7abf-c341-a2a34f56da8d, 'name': SearchDatastore_Task, 'duration_secs': 0.089615} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.193058] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1400.193334] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] d50d7460-2b70-45bc-940f-7d45f329fa1c/d50d7460-2b70-45bc-940f-7d45f329fa1c.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1400.193599] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dab88bf5-7f07-4ab7-86fa-86d05aaae706 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.200688] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1400.200688] env[69992]: value = "task-2898042" [ 1400.200688] env[69992]: _type = "Task" [ 1400.200688] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.208384] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898042, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.426116] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Task: {'id': task-2898039, 'name': ReconfigVM_Task, 'duration_secs': 0.27707} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.426554] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Reconfigured VM instance instance-0000006d to attach disk [datastore2] fc769b20-222e-4ff0-8ffd-7b24e4658b14/fc769b20-222e-4ff0-8ffd-7b24e4658b14.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1400.427430] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-15aca155-b671-4f15-8162-b2051cc12ac2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.436764] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Waiting for the task: (returnval){ [ 1400.436764] env[69992]: value = "task-2898043" [ 1400.436764] env[69992]: _type = "Task" [ 1400.436764] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.451365] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Task: {'id': task-2898043, 'name': Rename_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.590213] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898038, 'name': ReconfigVM_Task, 'duration_secs': 0.673436} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.591202] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Reconfigured VM instance instance-0000006c to attach disk [datastore1] e9018928-5237-4ba1-8c18-9ff1ec64a79c/e9018928-5237-4ba1-8c18-9ff1ec64a79c.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1400.592032] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f06b2a10-4e89-4142-9cd1-c91ae7ea104f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.601425] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for the task: (returnval){ [ 1400.601425] env[69992]: value = "task-2898044" [ 1400.601425] env[69992]: _type = "Task" [ 1400.601425] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.615541] env[69992]: DEBUG oslo_vmware.api [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898040, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.619227] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898044, 'name': Rename_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.645562] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.826s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1400.648834] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.720s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1400.648834] env[69992]: DEBUG nova.objects.instance [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lazy-loading 'pci_requests' on Instance uuid 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1400.664387] env[69992]: DEBUG oslo_vmware.api [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898018, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.676996] env[69992]: INFO nova.scheduler.client.report [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Deleted allocations for instance 921f1e1a-6de3-404d-8970-8545db0128f2 [ 1400.687024] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898041, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.713534] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898042, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.837037] env[69992]: DEBUG nova.compute.manager [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1400.867026] env[69992]: DEBUG nova.virt.hardware [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1400.867305] env[69992]: DEBUG nova.virt.hardware [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1400.867465] env[69992]: DEBUG nova.virt.hardware [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1400.867652] env[69992]: DEBUG nova.virt.hardware [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1400.867803] env[69992]: DEBUG nova.virt.hardware [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1400.867955] env[69992]: DEBUG nova.virt.hardware [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1400.868200] env[69992]: DEBUG nova.virt.hardware [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1400.868363] env[69992]: DEBUG nova.virt.hardware [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1400.868530] env[69992]: DEBUG nova.virt.hardware [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1400.869616] env[69992]: DEBUG nova.virt.hardware [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1400.869616] env[69992]: DEBUG nova.virt.hardware [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1400.870112] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc096ae-66e1-4122-ae80-701db69f6369 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.877748] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1d1179-cb35-43aa-bc80-085df76ae1ea {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.945993] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Task: {'id': task-2898043, 'name': Rename_Task, 'duration_secs': 0.317653} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.946236] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1400.946492] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f842c96-bb8f-4c8f-897a-50c1662b1743 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.952310] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Waiting for the task: (returnval){ [ 1400.952310] env[69992]: value = "task-2898045" [ 1400.952310] env[69992]: _type = "Task" [ 1400.952310] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.960839] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Task: {'id': task-2898045, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.017519] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "5c8b5f76-918a-44ac-b5b4-5f5f252da936" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1401.017519] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "5c8b5f76-918a-44ac-b5b4-5f5f252da936" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1401.017519] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "5c8b5f76-918a-44ac-b5b4-5f5f252da936-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1401.017519] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "5c8b5f76-918a-44ac-b5b4-5f5f252da936-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1401.017519] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "5c8b5f76-918a-44ac-b5b4-5f5f252da936-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1401.018427] env[69992]: INFO nova.compute.manager [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Terminating instance [ 1401.111425] env[69992]: DEBUG oslo_vmware.api [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898040, 'name': RemoveSnapshot_Task, 'duration_secs': 0.830893} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.112350] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Deleted Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1401.117778] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898044, 'name': Rename_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.159262] env[69992]: DEBUG nova.objects.instance [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lazy-loading 'numa_topology' on Instance uuid 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1401.160180] env[69992]: DEBUG oslo_vmware.api [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898018, 'name': ReconfigVM_Task, 'duration_secs': 6.769908} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.160290] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1401.160599] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Reconfigured VM to detach interface {{(pid=69992) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1401.180180] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898041, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.185068] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6606fbdb-8773-4f10-ad29-fa79bb02300b tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "921f1e1a-6de3-404d-8970-8545db0128f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.578s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1401.215165] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898042, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537517} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.215557] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] d50d7460-2b70-45bc-940f-7d45f329fa1c/d50d7460-2b70-45bc-940f-7d45f329fa1c.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1401.215839] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1401.216166] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-81b6260e-f9b4-4ad9-b2fc-cfb190e5fedd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.222424] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1401.222424] env[69992]: value = "task-2898046" [ 1401.222424] env[69992]: _type = "Task" [ 1401.222424] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.232325] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898046, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.253500] env[69992]: DEBUG nova.compute.manager [req-d9a93b77-7a65-4e99-b69a-2c5b98f0e2cc req-5960d35a-b171-43d4-814c-0472dd82dc75 service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Received event network-vif-plugged-4d2794aa-7eaf-404a-bf09-16cf3c357511 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1401.253759] env[69992]: DEBUG oslo_concurrency.lockutils [req-d9a93b77-7a65-4e99-b69a-2c5b98f0e2cc req-5960d35a-b171-43d4-814c-0472dd82dc75 service nova] Acquiring lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1401.253995] env[69992]: DEBUG oslo_concurrency.lockutils [req-d9a93b77-7a65-4e99-b69a-2c5b98f0e2cc req-5960d35a-b171-43d4-814c-0472dd82dc75 service nova] Lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1401.254357] env[69992]: DEBUG oslo_concurrency.lockutils [req-d9a93b77-7a65-4e99-b69a-2c5b98f0e2cc req-5960d35a-b171-43d4-814c-0472dd82dc75 service nova] Lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1401.254600] env[69992]: DEBUG nova.compute.manager [req-d9a93b77-7a65-4e99-b69a-2c5b98f0e2cc req-5960d35a-b171-43d4-814c-0472dd82dc75 service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] No waiting events found dispatching network-vif-plugged-4d2794aa-7eaf-404a-bf09-16cf3c357511 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1401.254843] env[69992]: WARNING nova.compute.manager [req-d9a93b77-7a65-4e99-b69a-2c5b98f0e2cc req-5960d35a-b171-43d4-814c-0472dd82dc75 service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Received unexpected event network-vif-plugged-4d2794aa-7eaf-404a-bf09-16cf3c357511 for instance with vm_state building and task_state spawning. [ 1401.349543] env[69992]: DEBUG nova.network.neutron [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Successfully updated port: 4d2794aa-7eaf-404a-bf09-16cf3c357511 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1401.463308] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Task: {'id': task-2898045, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.522593] env[69992]: DEBUG nova.compute.manager [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1401.522845] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1401.523777] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3b92d0-a276-4482-b33d-c54e2728ad3d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.531118] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1401.531368] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-27753fdb-c9ce-4f70-ab17-e900e959bf50 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.537300] env[69992]: DEBUG oslo_vmware.api [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1401.537300] env[69992]: value = "task-2898047" [ 1401.537300] env[69992]: _type = "Task" [ 1401.537300] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.545257] env[69992]: DEBUG oslo_vmware.api [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898047, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.615792] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898044, 'name': Rename_Task, 'duration_secs': 0.791155} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.616730] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1401.616730] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f6e83417-d926-4915-b45b-2ecb6d9312b6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.619839] env[69992]: WARNING nova.compute.manager [None req-00667340-f3bd-4421-a460-658bc5e81700 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Image not found during snapshot: nova.exception.ImageNotFound: Image 859422ea-3735-48a2-9334-7b77fbf9fdd8 could not be found. [ 1401.625399] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for the task: (returnval){ [ 1401.625399] env[69992]: value = "task-2898048" [ 1401.625399] env[69992]: _type = "Task" [ 1401.625399] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.634154] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898048, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.661069] env[69992]: INFO nova.compute.claims [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1401.680926] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898041, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.732410] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898046, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073795} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.732695] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1401.733568] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac458cd-869b-4019-b561-9a496368e070 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.756609] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] d50d7460-2b70-45bc-940f-7d45f329fa1c/d50d7460-2b70-45bc-940f-7d45f329fa1c.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1401.757790] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1df98ad2-f7f6-416d-932d-572d033e6e7a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.778840] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1401.778840] env[69992]: value = "task-2898049" [ 1401.778840] env[69992]: _type = "Task" [ 1401.778840] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.787409] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898049, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.851035] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "refresh_cache-b7af455d-a3a7-480f-b778-9eb3724fa6f1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1401.851035] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired lock "refresh_cache-b7af455d-a3a7-480f-b778-9eb3724fa6f1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1401.851177] env[69992]: DEBUG nova.network.neutron [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1401.965127] env[69992]: DEBUG oslo_vmware.api [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Task: {'id': task-2898045, 'name': PowerOnVM_Task, 'duration_secs': 0.886825} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.965127] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1401.965497] env[69992]: INFO nova.compute.manager [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Took 8.25 seconds to spawn the instance on the hypervisor. [ 1401.965544] env[69992]: DEBUG nova.compute.manager [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1401.967449] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c67a15-087a-47cd-9e51-469eda76eb5c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.047997] env[69992]: DEBUG oslo_vmware.api [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898047, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.078801] env[69992]: DEBUG oslo_concurrency.lockutils [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "904b8020-3060-4611-bdd4-650e288d69fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1402.079049] env[69992]: DEBUG oslo_concurrency.lockutils [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "904b8020-3060-4611-bdd4-650e288d69fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1402.136414] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898048, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.150352] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "d5a6a189-0a7d-49ba-acab-35a244cf76eb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1402.150416] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "d5a6a189-0a7d-49ba-acab-35a244cf76eb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1402.150627] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "d5a6a189-0a7d-49ba-acab-35a244cf76eb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1402.150889] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "d5a6a189-0a7d-49ba-acab-35a244cf76eb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1402.151180] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock 
"d5a6a189-0a7d-49ba-acab-35a244cf76eb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1402.153272] env[69992]: INFO nova.compute.manager [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Terminating instance [ 1402.181197] env[69992]: DEBUG oslo_vmware.api [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898041, 'name': PowerOnVM_Task, 'duration_secs': 1.772577} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.181455] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1402.181728] env[69992]: DEBUG nova.compute.manager [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1402.182492] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25374505-208a-4d19-8a7a-70cb806c27ba {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.288718] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898049, 'name': ReconfigVM_Task, 'duration_secs': 0.444794} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.289086] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Reconfigured VM instance instance-0000006e to attach disk [datastore2] d50d7460-2b70-45bc-940f-7d45f329fa1c/d50d7460-2b70-45bc-940f-7d45f329fa1c.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1402.290147] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-87869322-bbaf-48ef-81f9-16e5cdbeb4db {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.296739] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1402.296739] env[69992]: value = "task-2898050" [ 1402.296739] env[69992]: _type = "Task" [ 1402.296739] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.307738] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898050, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.383454] env[69992]: DEBUG nova.network.neutron [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1402.487582] env[69992]: INFO nova.compute.manager [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Took 17.60 seconds to build instance. [ 1402.549099] env[69992]: DEBUG oslo_vmware.api [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898047, 'name': PowerOffVM_Task, 'duration_secs': 0.517} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.549099] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1402.549099] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1402.549099] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-751875a2-ad1b-4192-bc0b-f52a4b74eaf4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.557432] env[69992]: DEBUG nova.network.neutron [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Updating instance_info_cache with network_info: [{"id": "4d2794aa-7eaf-404a-bf09-16cf3c357511", "address": "fa:16:3e:61:a9:59", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d2794aa-7e", "ovs_interfaceid": "4d2794aa-7eaf-404a-bf09-16cf3c357511", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1402.580936] env[69992]: DEBUG nova.compute.manager [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1402.615842] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.616054] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquired lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1402.616241] env[69992]: DEBUG nova.network.neutron [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1402.619127] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1402.619332] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1402.619511] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Deleting the datastore file [datastore1] 5c8b5f76-918a-44ac-b5b4-5f5f252da936 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1402.620497] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c236f866-9540-4e3b-8d06-0624d8d812f8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.629024] env[69992]: DEBUG oslo_vmware.api [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab 
tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1402.629024] env[69992]: value = "task-2898052" [ 1402.629024] env[69992]: _type = "Task" [ 1402.629024] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.639161] env[69992]: DEBUG oslo_vmware.api [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898048, 'name': PowerOnVM_Task, 'duration_secs': 0.690395} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.642091] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1402.642307] env[69992]: INFO nova.compute.manager [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Took 15.32 seconds to spawn the instance on the hypervisor. [ 1402.642505] env[69992]: DEBUG nova.compute.manager [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1402.642773] env[69992]: DEBUG oslo_vmware.api [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898052, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.643479] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1f62e8-8e97-4f81-909a-8fe67b4089fa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.657024] env[69992]: DEBUG nova.compute.manager [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1402.657024] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1402.657733] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dcece35-f661-47a4-92d0-4ab56ea87f39 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.665099] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1402.665099] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-23b6b796-7875-47a0-90d5-aa4e306b3317 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.672744] env[69992]: DEBUG oslo_vmware.api [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1402.672744] env[69992]: value = "task-2898053" [ 1402.672744] env[69992]: _type = "Task" [ 1402.672744] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.681493] env[69992]: DEBUG oslo_vmware.api [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898053, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.702730] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1402.808891] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898050, 'name': Rename_Task, 'duration_secs': 0.164837} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.809391] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1402.809758] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-78079845-d7e0-4e8f-b7b2-9144d5d24a54 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.819768] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1402.819768] env[69992]: value = "task-2898054" [ 1402.819768] env[69992]: _type = "Task" [ 1402.819768] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.830272] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898054, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.879920] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d71b2e7-a58d-42c2-8f00-efc7e5c7264f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.887457] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c54de4c0-b9de-4e05-9532-f314a2b65f79 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.918661] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f061953f-7468-4341-befe-ae73b25e740d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.926188] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46218e69-9952-4583-b34a-ddd78db61b0d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.940809] env[69992]: DEBUG nova.compute.provider_tree [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1402.991142] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b85d2915-30c3-4a36-a991-23a0feebc61d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Lock "fc769b20-222e-4ff0-8ffd-7b24e4658b14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.111s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1403.060686] env[69992]: DEBUG 
oslo_concurrency.lockutils [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Releasing lock "refresh_cache-b7af455d-a3a7-480f-b778-9eb3724fa6f1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1403.061222] env[69992]: DEBUG nova.compute.manager [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Instance network_info: |[{"id": "4d2794aa-7eaf-404a-bf09-16cf3c357511", "address": "fa:16:3e:61:a9:59", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d2794aa-7e", "ovs_interfaceid": "4d2794aa-7eaf-404a-bf09-16cf3c357511", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1403.061831] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:a9:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f6d1427-d86b-4371-9172-50e4bb0eb1cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d2794aa-7eaf-404a-bf09-16cf3c357511', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1403.072861] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1403.073614] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1403.073614] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a0962b34-1563-4f93-b9cc-e5afe68ad93f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.092329] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "fe3624b0-7d4a-4a16-83e3-3f28c2a74006" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1403.092593] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "fe3624b0-7d4a-4a16-83e3-3f28c2a74006" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1403.092799] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "fe3624b0-7d4a-4a16-83e3-3f28c2a74006-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1403.092994] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "fe3624b0-7d4a-4a16-83e3-3f28c2a74006-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1403.093173] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "fe3624b0-7d4a-4a16-83e3-3f28c2a74006-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1403.097366] env[69992]: INFO nova.compute.manager [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Terminating instance [ 1403.100975] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1403.100975] env[69992]: value = "task-2898055" [ 1403.100975] env[69992]: _type = "Task" [ 1403.100975] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.113267] env[69992]: DEBUG oslo_concurrency.lockutils [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1403.113484] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898055, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.142954] env[69992]: DEBUG oslo_vmware.api [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898052, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.477951} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.143144] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1403.143539] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1403.143812] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1403.144023] env[69992]: INFO nova.compute.manager [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Took 1.62 seconds to destroy the instance on the hypervisor. [ 1403.144314] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1403.144559] env[69992]: DEBUG nova.compute.manager [-] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1403.144713] env[69992]: DEBUG nova.network.neutron [-] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1403.163314] env[69992]: INFO nova.compute.manager [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Took 23.80 seconds to build instance. [ 1403.184861] env[69992]: DEBUG oslo_vmware.api [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898053, 'name': PowerOffVM_Task, 'duration_secs': 0.479485} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.185151] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1403.185320] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1403.185576] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aa5eb43e-f5a4-49cd-a9b6-8064c6e537b0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.248294] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1403.248523] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1403.248705] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Deleting the datastore file [datastore1] d5a6a189-0a7d-49ba-acab-35a244cf76eb {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1403.248971] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task 
with opID=oslo.vmware-2d5d4172-68dc-4e6d-ba7c-dc0c4ae16e00 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.255770] env[69992]: DEBUG oslo_vmware.api [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1403.255770] env[69992]: value = "task-2898057" [ 1403.255770] env[69992]: _type = "Task" [ 1403.255770] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.267587] env[69992]: DEBUG oslo_vmware.api [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898057, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.336279] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898054, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.369198] env[69992]: DEBUG nova.compute.manager [req-ad222e64-b42f-4c67-a761-5e478102ff11 req-c6147ceb-5290-4004-afd1-3e689504297e service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Received event network-changed-4d2794aa-7eaf-404a-bf09-16cf3c357511 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1403.369420] env[69992]: DEBUG nova.compute.manager [req-ad222e64-b42f-4c67-a761-5e478102ff11 req-c6147ceb-5290-4004-afd1-3e689504297e service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Refreshing instance network info cache due to event network-changed-4d2794aa-7eaf-404a-bf09-16cf3c357511. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1403.369716] env[69992]: DEBUG oslo_concurrency.lockutils [req-ad222e64-b42f-4c67-a761-5e478102ff11 req-c6147ceb-5290-4004-afd1-3e689504297e service nova] Acquiring lock "refresh_cache-b7af455d-a3a7-480f-b778-9eb3724fa6f1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.369858] env[69992]: DEBUG oslo_concurrency.lockutils [req-ad222e64-b42f-4c67-a761-5e478102ff11 req-c6147ceb-5290-4004-afd1-3e689504297e service nova] Acquired lock "refresh_cache-b7af455d-a3a7-480f-b778-9eb3724fa6f1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1403.369987] env[69992]: DEBUG nova.network.neutron [req-ad222e64-b42f-4c67-a761-5e478102ff11 req-c6147ceb-5290-4004-afd1-3e689504297e service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Refreshing network info cache for port 4d2794aa-7eaf-404a-bf09-16cf3c357511 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1403.444610] env[69992]: DEBUG nova.scheduler.client.report [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1403.523774] env[69992]: INFO nova.network.neutron [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Port 3d571c52-27cf-411e-86f3-279b842e93ca from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1403.525581] env[69992]: DEBUG nova.network.neutron [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Updating instance_info_cache with network_info: [{"id": "c35bf17a-173c-4013-b8e4-85b2415e8860", "address": "fa:16:3e:e2:6a:23", "network": {"id": "7c8e9b14-bcc2-45f2-8b37-5f478b75057e", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1737133270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc6fa4e45f4c47c49d67e6efe2eb7a50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc35bf17a-17", "ovs_interfaceid": "c35bf17a-173c-4013-b8e4-85b2415e8860", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1403.609133] env[69992]: DEBUG nova.compute.manager [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1403.609458] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1403.610900] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19afa1ae-05aa-4ba7-a494-2e2da03b95ed {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.623654] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1403.623654] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898055, 'name': CreateVM_Task, 'duration_secs': 0.399986} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.624023] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bc7ba7f6-42db-4e7b-8b6d-3817d90e8fd4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.625256] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1403.626027] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.626190] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1403.626429] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1403.627031] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e3ca145-ca60-464a-a803-97b67f0eefce {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.632314] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1403.632314] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]526aab22-b09d-991f-ced8-3b08feb9cae3" [ 1403.632314] env[69992]: _type = "Task" [ 1403.632314] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.632559] env[69992]: DEBUG oslo_vmware.api [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1403.632559] env[69992]: value = "task-2898058" [ 1403.632559] env[69992]: _type = "Task" [ 1403.632559] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.645720] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526aab22-b09d-991f-ced8-3b08feb9cae3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.649335] env[69992]: DEBUG oslo_vmware.api [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898058, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.665665] env[69992]: DEBUG oslo_concurrency.lockutils [None req-04c9d42f-bd4d-4015-b9ab-54760c5777ad tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Lock "e9018928-5237-4ba1-8c18-9ff1ec64a79c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.312s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1403.773184] env[69992]: DEBUG oslo_vmware.api [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898057, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.223542} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.773184] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1403.773184] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1403.773184] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1403.773184] env[69992]: INFO nova.compute.manager [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1403.773184] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1403.773184] env[69992]: DEBUG nova.compute.manager [-] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1403.773184] env[69992]: DEBUG nova.network.neutron [-] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1403.831306] env[69992]: DEBUG oslo_vmware.api [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898054, 'name': PowerOnVM_Task, 'duration_secs': 0.709229} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.833428] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1403.833690] env[69992]: INFO nova.compute.manager [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Took 7.75 seconds to spawn the instance on the hypervisor. [ 1403.833972] env[69992]: DEBUG nova.compute.manager [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1403.834769] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6c2c95-a696-4fb8-a309-c129ef086e28 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.951016] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.303s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1403.953552] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 5.047s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1404.003032] env[69992]: DEBUG oslo_concurrency.lockutils [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquiring lock "e9018928-5237-4ba1-8c18-9ff1ec64a79c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1404.003032] env[69992]: DEBUG oslo_concurrency.lockutils [None 
req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Lock "e9018928-5237-4ba1-8c18-9ff1ec64a79c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1404.003032] env[69992]: DEBUG oslo_concurrency.lockutils [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquiring lock "e9018928-5237-4ba1-8c18-9ff1ec64a79c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1404.003032] env[69992]: DEBUG oslo_concurrency.lockutils [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Lock "e9018928-5237-4ba1-8c18-9ff1ec64a79c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1404.003032] env[69992]: DEBUG oslo_concurrency.lockutils [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Lock "e9018928-5237-4ba1-8c18-9ff1ec64a79c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1404.004892] env[69992]: INFO nova.compute.manager [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Terminating instance [ 1404.008925] env[69992]: INFO nova.network.neutron [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Updating port 0042c1e4-d906-4261-a18e-ce232533cbdd with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1404.030085] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Releasing lock "refresh_cache-fe3624b0-7d4a-4a16-83e3-3f28c2a74006" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1404.066470] env[69992]: DEBUG oslo_concurrency.lockutils [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Acquiring lock "fc769b20-222e-4ff0-8ffd-7b24e4658b14" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1404.066732] env[69992]: DEBUG oslo_concurrency.lockutils [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Lock "fc769b20-222e-4ff0-8ffd-7b24e4658b14" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1404.066952] env[69992]: DEBUG oslo_concurrency.lockutils [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Acquiring lock "fc769b20-222e-4ff0-8ffd-7b24e4658b14-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1404.067205] env[69992]: DEBUG oslo_concurrency.lockutils [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Lock "fc769b20-222e-4ff0-8ffd-7b24e4658b14-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1404.067414] env[69992]: DEBUG oslo_concurrency.lockutils [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Lock "fc769b20-222e-4ff0-8ffd-7b24e4658b14-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1404.069491] env[69992]: INFO nova.compute.manager [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Terminating instance [ 1404.125032] env[69992]: DEBUG nova.network.neutron [-] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.150834] env[69992]: DEBUG oslo_vmware.api [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898058, 'name': PowerOffVM_Task, 'duration_secs': 0.320459} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.151162] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]526aab22-b09d-991f-ced8-3b08feb9cae3, 'name': SearchDatastore_Task, 'duration_secs': 0.024624} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.151428] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1404.151636] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1404.151968] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1404.152246] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1404.152553] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.152725] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1404.152908] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1404.153523] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ebc1c25-d93d-4917-8194-12ed19f2b8d0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.155144] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43837985-bf83-4d44-a05c-1a10d82ecccb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.168685] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 
tempest-ServerActionsTestOtherB-873622132-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1404.168900] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1404.169695] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa5ab369-d2c6-410e-bb87-3fbd1548165d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.175677] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1404.175677] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52216525-c204-e1b9-27b0-d82309f939d7" [ 1404.175677] env[69992]: _type = "Task" [ 1404.175677] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.184498] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52216525-c204-e1b9-27b0-d82309f939d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.229999] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1404.230269] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1404.231090] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Deleting the datastore file [datastore2] fe3624b0-7d4a-4a16-83e3-3f28c2a74006 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1404.231090] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5660a08d-5811-4208-aa04-955ef3c57143 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.237117] env[69992]: DEBUG oslo_vmware.api [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1404.237117] env[69992]: value = "task-2898060" [ 1404.237117] env[69992]: _type = "Task" [ 1404.237117] 
env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.245572] env[69992]: DEBUG oslo_vmware.api [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898060, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.246478] env[69992]: DEBUG nova.network.neutron [req-ad222e64-b42f-4c67-a761-5e478102ff11 req-c6147ceb-5290-4004-afd1-3e689504297e service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Updated VIF entry in instance network info cache for port 4d2794aa-7eaf-404a-bf09-16cf3c357511. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1404.246835] env[69992]: DEBUG nova.network.neutron [req-ad222e64-b42f-4c67-a761-5e478102ff11 req-c6147ceb-5290-4004-afd1-3e689504297e service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Updating instance_info_cache with network_info: [{"id": "4d2794aa-7eaf-404a-bf09-16cf3c357511", "address": "fa:16:3e:61:a9:59", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d2794aa-7e", "ovs_interfaceid": "4d2794aa-7eaf-404a-bf09-16cf3c357511", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.353464] env[69992]: INFO nova.compute.manager [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Took 18.41 seconds to build instance. [ 1404.511878] env[69992]: DEBUG nova.compute.manager [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1404.512163] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1404.513261] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25b8fa4e-2b97-4ffb-a3ea-7a7ba738206d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.521529] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1404.521800] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e47fdca-6dc4-468d-ba79-61997a50c4f8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.528621] env[69992]: DEBUG oslo_vmware.api [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for the task: (returnval){ [ 1404.528621] env[69992]: value = "task-2898061" [ 1404.528621] env[69992]: _type = "Task" [ 1404.528621] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.534659] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4377c182-c439-4c73-8b38-5f728a5224ed tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "interface-fe3624b0-7d4a-4a16-83e3-3f28c2a74006-3d571c52-27cf-411e-86f3-279b842e93ca" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.982s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1404.538304] env[69992]: DEBUG oslo_vmware.api [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898061, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.564263] env[69992]: DEBUG nova.network.neutron [-] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.573204] env[69992]: DEBUG nova.compute.manager [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1404.573520] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1404.574462] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0906a64f-3756-4276-916d-a34f568bc7dc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.583216] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1404.583216] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c5bdbaa8-f03d-4845-af84-5c756bc4c12f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.589723] env[69992]: DEBUG oslo_vmware.api [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Waiting for the task: (returnval){ [ 1404.589723] env[69992]: value = "task-2898062" [ 1404.589723] env[69992]: _type = "Task" [ 1404.589723] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.598361] env[69992]: DEBUG oslo_vmware.api [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Task: {'id': task-2898062, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.627817] env[69992]: INFO nova.compute.manager [-] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Took 1.48 seconds to deallocate network for instance. [ 1404.690426] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52216525-c204-e1b9-27b0-d82309f939d7, 'name': SearchDatastore_Task, 'duration_secs': 0.012643} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.691730] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8052a59f-974a-4a47-b4ac-d07d11d979dd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.699844] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1404.699844] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52e2dad9-24f2-02a1-91ba-8145b413576c" [ 1404.699844] env[69992]: _type = "Task" [ 1404.699844] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.712064] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e2dad9-24f2-02a1-91ba-8145b413576c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.746844] env[69992]: DEBUG oslo_vmware.api [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898060, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198738} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.747248] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1404.747458] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1404.747642] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1404.747819] env[69992]: INFO nova.compute.manager [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1404.748080] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1404.748286] env[69992]: DEBUG nova.compute.manager [-] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1404.748381] env[69992]: DEBUG nova.network.neutron [-] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1404.750462] env[69992]: DEBUG oslo_concurrency.lockutils [req-ad222e64-b42f-4c67-a761-5e478102ff11 req-c6147ceb-5290-4004-afd1-3e689504297e service nova] Releasing lock "refresh_cache-b7af455d-a3a7-480f-b778-9eb3724fa6f1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1404.855398] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96acc1ee-72e5-4b6f-951e-94e2ae5d1ac3 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "d50d7460-2b70-45bc-940f-7d45f329fa1c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.922s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1404.966524] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Skipping migration as instance is neither resizing nor live-migrating. {{(pid=69992) _update_usage_from_migrations /opt/stack/nova/nova/compute/resource_tracker.py:1563}} [ 1404.989155] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance fcbe1142-72dc-4a02-af9b-e03a2031a247 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.989155] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.989155] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance e95e47c2-d82e-4153-8d16-7b65d992e91a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.989155] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 6ccc70f5-4857-4af3-99a1-f60ec35aebaf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.989155] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 9464339a-b760-47e9-bc75-e88ce18bf71b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.989155] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance fe3624b0-7d4a-4a16-83e3-3f28c2a74006 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.989155] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 5c8b5f76-918a-44ac-b5b4-5f5f252da936 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.989155] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 08869f38-9609-4f7f-9110-2f26fd1cb3f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.989155] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance d5a6a189-0a7d-49ba-acab-35a244cf76eb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.989155] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance e9018928-5237-4ba1-8c18-9ff1ec64a79c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.989155] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance fc769b20-222e-4ff0-8ffd-7b24e4658b14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.989155] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance b7af455d-a3a7-480f-b778-9eb3724fa6f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.989155] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1405.039764] env[69992]: DEBUG oslo_vmware.api [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898061, 'name': PowerOffVM_Task, 'duration_secs': 0.278867} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.040044] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1405.040229] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1405.040504] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f494197f-8078-4040-99b5-f2eaf73e3999 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.066777] env[69992]: INFO nova.compute.manager [-] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Took 1.29 seconds to deallocate network for instance. [ 1405.099824] env[69992]: DEBUG oslo_vmware.api [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Task: {'id': task-2898062, 'name': PowerOffVM_Task, 'duration_secs': 0.234982} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.100128] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1405.100222] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1405.100544] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3eefad1e-d28e-40cc-82dd-f65f68a4c8e3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.135022] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1405.165179] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1405.165401] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1405.165581] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Deleting the datastore file [datastore2] fc769b20-222e-4ff0-8ffd-7b24e4658b14 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1405.165882] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b36a5ee-b7dd-4bcd-a1ec-fded153473f9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.172880] env[69992]: DEBUG oslo_vmware.api [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Waiting for the task: (returnval){ [ 1405.172880] env[69992]: value = "task-2898065" [ 1405.172880] env[69992]: _type = "Task" [ 1405.172880] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.181661] env[69992]: DEBUG oslo_vmware.api [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Task: {'id': task-2898065, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.209396] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e2dad9-24f2-02a1-91ba-8145b413576c, 'name': SearchDatastore_Task, 'duration_secs': 0.013566} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.209713] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1405.209944] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] b7af455d-a3a7-480f-b778-9eb3724fa6f1/b7af455d-a3a7-480f-b778-9eb3724fa6f1.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1405.210213] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2109ee17-a508-454a-9e25-437205acc342 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.215046] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1405.215334] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1405.215428] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Deleting the datastore file [datastore1] e9018928-5237-4ba1-8c18-9ff1ec64a79c {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1405.215705] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58893319-d424-4e1e-be67-bcc14367bfee {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.218482] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1405.218482] env[69992]: value = "task-2898066" [ 1405.218482] env[69992]: _type = "Task" [ 1405.218482] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.223511] env[69992]: DEBUG oslo_vmware.api [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for the task: (returnval){ [ 1405.223511] env[69992]: value = "task-2898067" [ 1405.223511] env[69992]: _type = "Task" [ 1405.223511] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.229571] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898066, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.234577] env[69992]: DEBUG oslo_vmware.api [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898067, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.403916] env[69992]: DEBUG nova.compute.manager [req-106fdd7b-3a20-479a-b9da-2a34d47996ee req-8bfc4e75-0103-460f-8ea6-5e411574feab service nova] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Received event network-vif-deleted-c41aefad-ecba-4fa1-ae2e-2586734ffa8a {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1405.404923] env[69992]: DEBUG nova.compute.manager [req-106fdd7b-3a20-479a-b9da-2a34d47996ee req-8bfc4e75-0103-460f-8ea6-5e411574feab service nova] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Received event network-vif-deleted-3c8deaad-b3d5-4d3e-b86c-1fbc4e767b64 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1405.491871] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 904b8020-3060-4611-bdd4-650e288d69fd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1405.492604] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Migration b98c3a7b-4cb1-4393-bfc7-46c47f19ce94 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1405.495135] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance d50d7460-2b70-45bc-940f-7d45f329fa1c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1405.495135] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1405.495135] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3200MB phys_disk=200GB used_disk=14GB total_vcpus=48 used_vcpus=14 pci_stats=[] {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1405.571649] env[69992]: DEBUG nova.compute.manager [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Stashing vm_state: active {{(pid=69992) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1405.577816] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1405.582521] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "refresh_cache-25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1405.582737] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquired lock "refresh_cache-25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1405.582923] env[69992]: DEBUG nova.network.neutron [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1405.683073] env[69992]: DEBUG oslo_vmware.api [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Task: {'id': task-2898065, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15378} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.683347] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1405.683642] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1405.683893] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1405.684103] env[69992]: INFO nova.compute.manager [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1405.684366] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1405.684573] env[69992]: DEBUG nova.compute.manager [-] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1405.684669] env[69992]: DEBUG nova.network.neutron [-] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1405.739698] env[69992]: DEBUG oslo_vmware.api [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898067, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.330661} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.739993] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898066, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.740286] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1405.740503] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1405.740717] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1405.740927] env[69992]: INFO nova.compute.manager [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1405.741221] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1405.741451] env[69992]: DEBUG nova.compute.manager [-] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1405.741590] env[69992]: DEBUG nova.network.neutron [-] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1405.788164] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb517e71-508b-4658-8d5f-818a9a34bd3a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.796164] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7eda83c-2441-4d26-99df-d1272b4d9530 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.830175] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cffcfad-93ed-4ca0-8b3d-6ebda543c2cb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.836825] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebeefeb2-5e57-408a-b090-147f63ab0cd3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.850081] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory 
has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1406.011145] env[69992]: DEBUG nova.compute.manager [req-240b1970-3aa8-412a-9b9f-122502f8824a req-a5fc5543-f5c4-44ad-8b66-9d6fddc2ddf0 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Received event network-vif-deleted-c35bf17a-173c-4013-b8e4-85b2415e8860 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1406.011145] env[69992]: INFO nova.compute.manager [req-240b1970-3aa8-412a-9b9f-122502f8824a req-a5fc5543-f5c4-44ad-8b66-9d6fddc2ddf0 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Neutron deleted interface c35bf17a-173c-4013-b8e4-85b2415e8860; detaching it from the instance and deleting it from the info cache [ 1406.011145] env[69992]: DEBUG nova.network.neutron [req-240b1970-3aa8-412a-9b9f-122502f8824a req-a5fc5543-f5c4-44ad-8b66-9d6fddc2ddf0 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1406.102609] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1406.235507] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898066, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.54847} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.236573] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] b7af455d-a3a7-480f-b778-9eb3724fa6f1/b7af455d-a3a7-480f-b778-9eb3724fa6f1.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1406.237116] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1406.239217] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e6ac28f5-d318-4e18-9966-cf7dc6b91666 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.246389] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1406.246389] env[69992]: value = "task-2898068" [ 1406.246389] env[69992]: _type = "Task" [ 1406.246389] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.254515] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898068, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.354114] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1406.413341] env[69992]: DEBUG nova.network.neutron [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Updating instance_info_cache with network_info: [{"id": "0042c1e4-d906-4261-a18e-ce232533cbdd", "address": "fa:16:3e:44:45:52", "network": {"id": "58824cf0-bce0-4f1b-9942-dd68624dd3ff", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1287894269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1471cdd6671b4e6ebc23b8fc2b120b63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6fab536-1e48-4d07-992a-076f0e6d089c", "external-id": "nsx-vlan-transportzone-61", "segmentation_id": 61, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0042c1e4-d9", "ovs_interfaceid": "0042c1e4-d906-4261-a18e-ce232533cbdd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1406.491906] env[69992]: DEBUG nova.network.neutron [-] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1406.514527] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7cc50999-0e4f-4ba4-9d66-39195d3e9d33 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.529444] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eee67d3-f3c5-4df4-9b2a-80ac0fcc8d4f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.567143] env[69992]: DEBUG nova.compute.manager [req-240b1970-3aa8-412a-9b9f-122502f8824a req-a5fc5543-f5c4-44ad-8b66-9d6fddc2ddf0 service nova] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Detach interface failed, port_id=c35bf17a-173c-4013-b8e4-85b2415e8860, reason: Instance fe3624b0-7d4a-4a16-83e3-3f28c2a74006 could not be found. 
{{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1406.586360] env[69992]: DEBUG nova.network.neutron [-] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1406.755250] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898068, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069952} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.755350] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1406.756149] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea2e814f-42d4-4065-97bc-63118b728049 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.778167] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] b7af455d-a3a7-480f-b778-9eb3724fa6f1/b7af455d-a3a7-480f-b778-9eb3724fa6f1.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1406.778838] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db4da1e5-e8fc-4464-aa41-d2ed6abd327b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.798558] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1406.798558] env[69992]: value = "task-2898069" [ 1406.798558] env[69992]: _type = "Task" [ 1406.798558] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.806577] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898069, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.860302] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69992) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1406.860471] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.907s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1406.860709] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 4.158s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1406.860919] env[69992]: DEBUG nova.objects.instance [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69992) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1406.863628] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1406.863807] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Cleaning up deleted instances {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1406.918779] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Releasing lock "refresh_cache-25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1406.947853] env[69992]: DEBUG nova.virt.hardware [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='8d67bcb9a72dffcb43a2621360f85c0a',container_format='bare',created_at=2025-03-10T17:54:38Z,direct_url=,disk_format='vmdk',id=697d1ada-cc80-456a-9a40-098dcf5fc096,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1095928358-shelved',owner='1471cdd6671b4e6ebc23b8fc2b120b63',properties=ImageMetaProps,protected=,size=31666176,status='active',tags=,updated_at=2025-03-10T17:54:53Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1406.948159] env[69992]: DEBUG nova.virt.hardware [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1406.948367] env[69992]: DEBUG nova.virt.hardware [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1406.948582] env[69992]: DEBUG nova.virt.hardware [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1406.948778] env[69992]: DEBUG nova.virt.hardware [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1406.948943] env[69992]: DEBUG nova.virt.hardware [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1406.949222] env[69992]: DEBUG nova.virt.hardware [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1406.949409] env[69992]: DEBUG nova.virt.hardware [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1406.949604] env[69992]: DEBUG nova.virt.hardware [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1406.949805] env[69992]: DEBUG nova.virt.hardware [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1406.950033] env[69992]: DEBUG nova.virt.hardware [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1406.951027] env[69992]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0f3643a-2810-4db7-9d41-f856466779c5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.959609] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f32a0a2-18e4-41a5-b28d-9ac595b0ee79 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.978212] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:45:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd6fab536-1e48-4d07-992a-076f0e6d089c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0042c1e4-d906-4261-a18e-ce232533cbdd', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1406.986079] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1406.986736] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1406.987104] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6dde834c-e688-4044-a18e-17bef6fc3845 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.001793] env[69992]: INFO nova.compute.manager [-] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Took 2.25 seconds to deallocate network for instance. [ 1407.009193] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1407.009193] env[69992]: value = "task-2898070" [ 1407.009193] env[69992]: _type = "Task" [ 1407.009193] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.018797] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898070, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.088963] env[69992]: INFO nova.compute.manager [-] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Took 1.40 seconds to deallocate network for instance. [ 1407.133850] env[69992]: DEBUG nova.network.neutron [-] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1407.309948] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898069, 'name': ReconfigVM_Task, 'duration_secs': 0.317495} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.309948] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Reconfigured VM instance instance-0000006f to attach disk [datastore1] b7af455d-a3a7-480f-b778-9eb3724fa6f1/b7af455d-a3a7-480f-b778-9eb3724fa6f1.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1407.310319] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-167db69c-44e8-4d9b-928f-5b257a0d29b1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.317356] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1407.317356] env[69992]: value = "task-2898071" [ 1407.317356] env[69992]: _type = "Task" [ 1407.317356] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.326268] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898071, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.384205] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] There are 59 instances to clean {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1407.384205] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 921f1e1a-6de3-404d-8970-8545db0128f2] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1407.488575] env[69992]: DEBUG nova.compute.manager [req-f764cb97-1f00-4015-8247-e1bb5830cea4 req-624d9079-cb5a-46e1-a4ef-237c4a1deea3 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Received event network-vif-plugged-0042c1e4-d906-4261-a18e-ce232533cbdd {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1407.488849] env[69992]: DEBUG oslo_concurrency.lockutils [req-f764cb97-1f00-4015-8247-e1bb5830cea4 req-624d9079-cb5a-46e1-a4ef-237c4a1deea3 service nova] Acquiring lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1407.489079] env[69992]: DEBUG oslo_concurrency.lockutils [req-f764cb97-1f00-4015-8247-e1bb5830cea4 req-624d9079-cb5a-46e1-a4ef-237c4a1deea3 service nova] Lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1407.489255] env[69992]: DEBUG oslo_concurrency.lockutils [req-f764cb97-1f00-4015-8247-e1bb5830cea4 req-624d9079-cb5a-46e1-a4ef-237c4a1deea3 service nova] 
Lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1407.489427] env[69992]: DEBUG nova.compute.manager [req-f764cb97-1f00-4015-8247-e1bb5830cea4 req-624d9079-cb5a-46e1-a4ef-237c4a1deea3 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] No waiting events found dispatching network-vif-plugged-0042c1e4-d906-4261-a18e-ce232533cbdd {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1407.489613] env[69992]: WARNING nova.compute.manager [req-f764cb97-1f00-4015-8247-e1bb5830cea4 req-624d9079-cb5a-46e1-a4ef-237c4a1deea3 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Received unexpected event network-vif-plugged-0042c1e4-d906-4261-a18e-ce232533cbdd for instance with vm_state shelved_offloaded and task_state spawning. [ 1407.489802] env[69992]: DEBUG nova.compute.manager [req-f764cb97-1f00-4015-8247-e1bb5830cea4 req-624d9079-cb5a-46e1-a4ef-237c4a1deea3 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Received event network-changed-0042c1e4-d906-4261-a18e-ce232533cbdd {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1407.489961] env[69992]: DEBUG nova.compute.manager [req-f764cb97-1f00-4015-8247-e1bb5830cea4 req-624d9079-cb5a-46e1-a4ef-237c4a1deea3 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Refreshing instance network info cache due to event network-changed-0042c1e4-d906-4261-a18e-ce232533cbdd. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1407.490161] env[69992]: DEBUG oslo_concurrency.lockutils [req-f764cb97-1f00-4015-8247-e1bb5830cea4 req-624d9079-cb5a-46e1-a4ef-237c4a1deea3 service nova] Acquiring lock "refresh_cache-25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.490297] env[69992]: DEBUG oslo_concurrency.lockutils [req-f764cb97-1f00-4015-8247-e1bb5830cea4 req-624d9079-cb5a-46e1-a4ef-237c4a1deea3 service nova] Acquired lock "refresh_cache-25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1407.490452] env[69992]: DEBUG nova.network.neutron [req-f764cb97-1f00-4015-8247-e1bb5830cea4 req-624d9079-cb5a-46e1-a4ef-237c4a1deea3 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Refreshing network info cache for port 0042c1e4-d906-4261-a18e-ce232533cbdd {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1407.507923] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1407.519545] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898070, 'name': CreateVM_Task, 'duration_secs': 0.469547} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.519698] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1407.520340] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/697d1ada-cc80-456a-9a40-098dcf5fc096" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.520502] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/697d1ada-cc80-456a-9a40-098dcf5fc096" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1407.520903] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/697d1ada-cc80-456a-9a40-098dcf5fc096" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1407.521174] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8db5fc55-0ace-4ecb-b150-0e12b26ef7ec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.525703] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1407.525703] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a0c7f7-3914-7c0c-c92e-0a70fa63265d" [ 1407.525703] env[69992]: _type = "Task" [ 1407.525703] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.533494] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a0c7f7-3914-7c0c-c92e-0a70fa63265d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.595649] env[69992]: DEBUG oslo_concurrency.lockutils [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1407.636156] env[69992]: INFO nova.compute.manager [-] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Took 1.89 seconds to deallocate network for instance. 
[ 1407.827323] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898071, 'name': Rename_Task, 'duration_secs': 0.151818} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.827590] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1407.827828] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b874511-407b-4ce8-a9d9-56670548a703 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.835109] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1407.835109] env[69992]: value = "task-2898072" [ 1407.835109] env[69992]: _type = "Task" [ 1407.835109] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.844199] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898072, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.886698] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1f91f382-afda-4307-b614-e3ddec010318 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.026s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1407.887917] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: bf45e20c-0fd7-4a27-924c-0ae56c6cff82] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1407.889762] env[69992]: DEBUG oslo_concurrency.lockutils [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.777s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1407.890897] env[69992]: INFO nova.compute.claims [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1408.035016] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Releasing lock "[datastore2] 
devstack-image-cache_base/697d1ada-cc80-456a-9a40-098dcf5fc096" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1408.035364] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Processing image 697d1ada-cc80-456a-9a40-098dcf5fc096 {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1408.035518] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/697d1ada-cc80-456a-9a40-098dcf5fc096/697d1ada-cc80-456a-9a40-098dcf5fc096.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1408.035676] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/697d1ada-cc80-456a-9a40-098dcf5fc096/697d1ada-cc80-456a-9a40-098dcf5fc096.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1408.035855] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1408.036114] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4e5e776-aee2-4cd0-9570-97c9a9eb9431 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.046814] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1408.046990] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1408.047690] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d6d042a-66b6-4fa9-8125-9238d5d25f6f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.052653] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1408.052653] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52b67a0c-3d85-1803-9976-4cdca150ef81" [ 1408.052653] env[69992]: _type = "Task" [ 1408.052653] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.059690] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52b67a0c-3d85-1803-9976-4cdca150ef81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.142494] env[69992]: DEBUG oslo_concurrency.lockutils [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1408.185553] env[69992]: DEBUG nova.network.neutron [req-f764cb97-1f00-4015-8247-e1bb5830cea4 req-624d9079-cb5a-46e1-a4ef-237c4a1deea3 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Updated VIF entry in instance network info cache for port 0042c1e4-d906-4261-a18e-ce232533cbdd. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1408.185851] env[69992]: DEBUG nova.network.neutron [req-f764cb97-1f00-4015-8247-e1bb5830cea4 req-624d9079-cb5a-46e1-a4ef-237c4a1deea3 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Updating instance_info_cache with network_info: [{"id": "0042c1e4-d906-4261-a18e-ce232533cbdd", "address": "fa:16:3e:44:45:52", "network": {"id": "58824cf0-bce0-4f1b-9942-dd68624dd3ff", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1287894269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1471cdd6671b4e6ebc23b8fc2b120b63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6fab536-1e48-4d07-992a-076f0e6d089c", "external-id": "nsx-vlan-transportzone-61", "segmentation_id": 61, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0042c1e4-d9", "ovs_interfaceid": "0042c1e4-d906-4261-a18e-ce232533cbdd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1408.344909] env[69992]: DEBUG oslo_vmware.api [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898072, 'name': PowerOnVM_Task, 'duration_secs': 0.491697} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.345162] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1408.345366] env[69992]: INFO nova.compute.manager [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Took 7.51 seconds to spawn the instance on the hypervisor. [ 1408.345543] env[69992]: DEBUG nova.compute.manager [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1408.346311] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d16d5ac3-d96d-4e98-a840-bfc96013fefb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.393697] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: b72eb094-b0fa-4e6f-bc29-c110692c7204] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1408.563857] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Preparing fetch location {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1408.564120] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Fetch image to [datastore2] OSTACK_IMG_00dd3969-5fc4-438d-9611-27f894f071ef/OSTACK_IMG_00dd3969-5fc4-438d-9611-27f894f071ef.vmdk {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1408.564309] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Downloading stream optimized image 697d1ada-cc80-456a-9a40-098dcf5fc096 to [datastore2] OSTACK_IMG_00dd3969-5fc4-438d-9611-27f894f071ef/OSTACK_IMG_00dd3969-5fc4-438d-9611-27f894f071ef.vmdk on the data store datastore2 as vApp {{(pid=69992) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1408.564508] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Downloading image file data 697d1ada-cc80-456a-9a40-098dcf5fc096 to the ESX as VM named 'OSTACK_IMG_00dd3969-5fc4-438d-9611-27f894f071ef' {{(pid=69992) fetch_image_stream_optimized 
/opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1408.640066] env[69992]: DEBUG oslo_vmware.rw_handles [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1408.640066] env[69992]: value = "resgroup-9" [ 1408.640066] env[69992]: _type = "ResourcePool" [ 1408.640066] env[69992]: }. {{(pid=69992) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1408.640390] env[69992]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-360cb914-9198-4674-9f1f-b8de4228a157 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.661938] env[69992]: DEBUG oslo_vmware.rw_handles [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lease: (returnval){ [ 1408.661938] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5284304d-f4cf-8ce9-c26c-7fa3afa714ac" [ 1408.661938] env[69992]: _type = "HttpNfcLease" [ 1408.661938] env[69992]: } obtained for vApp import into resource pool (val){ [ 1408.661938] env[69992]: value = "resgroup-9" [ 1408.661938] env[69992]: _type = "ResourcePool" [ 1408.661938] env[69992]: }. {{(pid=69992) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1408.662245] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the lease: (returnval){ [ 1408.662245] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5284304d-f4cf-8ce9-c26c-7fa3afa714ac" [ 1408.662245] env[69992]: _type = "HttpNfcLease" [ 1408.662245] env[69992]: } to be ready. {{(pid=69992) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1408.668177] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1408.668177] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5284304d-f4cf-8ce9-c26c-7fa3afa714ac" [ 1408.668177] env[69992]: _type = "HttpNfcLease" [ 1408.668177] env[69992]: } is initializing. 
{{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1408.688250] env[69992]: DEBUG oslo_concurrency.lockutils [req-f764cb97-1f00-4015-8247-e1bb5830cea4 req-624d9079-cb5a-46e1-a4ef-237c4a1deea3 service nova] Releasing lock "refresh_cache-25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1408.688505] env[69992]: DEBUG nova.compute.manager [req-f764cb97-1f00-4015-8247-e1bb5830cea4 req-624d9079-cb5a-46e1-a4ef-237c4a1deea3 service nova] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Received event network-vif-deleted-e1406cdb-0572-4e8a-9429-723f364d855d {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1408.688717] env[69992]: DEBUG nova.compute.manager [req-f764cb97-1f00-4015-8247-e1bb5830cea4 req-624d9079-cb5a-46e1-a4ef-237c4a1deea3 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Received event network-vif-deleted-92f378ec-95f1-4743-9f83-12cb0a249cd0 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1408.688944] env[69992]: DEBUG nova.compute.manager [req-f764cb97-1f00-4015-8247-e1bb5830cea4 req-624d9079-cb5a-46e1-a4ef-237c4a1deea3 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Received event network-vif-deleted-52bbb2d4-ddca-4b0b-951c-b68eb107fd53 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1408.689169] env[69992]: DEBUG nova.compute.manager [req-f764cb97-1f00-4015-8247-e1bb5830cea4 req-624d9079-cb5a-46e1-a4ef-237c4a1deea3 service nova] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Received event network-vif-deleted-38bb614b-f887-4e3a-996d-e5b3b1141511 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1408.862511] env[69992]: INFO nova.compute.manager [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Took 15.32 seconds to build instance. [ 1408.898088] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 91e35dbb-d611-4cd4-9ba8-7c4cc9aa49c3] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1409.121757] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63d1789-1f4e-4b85-a2ec-6c54faf4dce2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.131364] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b31746e6-a9fb-4ea7-9f82-217bb8501bd7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.168085] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36b5dff-7ca8-4afa-a854-c4413ecc543a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.174069] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1409.174069] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5284304d-f4cf-8ce9-c26c-7fa3afa714ac" [ 1409.174069] env[69992]: _type = "HttpNfcLease" [ 1409.174069] env[69992]: } is initializing. 
{{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1409.177268] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca06e6fa-ca70-465c-8da2-7533d3ae7c4a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.191717] env[69992]: DEBUG nova.compute.provider_tree [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1409.365156] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7011ed74-b39c-4d13-9b36-b53ce073ff4b tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.836s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1409.403993] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 3f44442d-82b1-4669-8d65-0088d4a9babb] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1409.661783] env[69992]: DEBUG nova.compute.manager [req-467589a3-4048-4b42-ba41-f595e48bb03d req-56a85e29-f912-4afd-968a-fc6de9ede17d service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Received event network-changed-4d2794aa-7eaf-404a-bf09-16cf3c357511 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1409.662142] env[69992]: DEBUG nova.compute.manager [req-467589a3-4048-4b42-ba41-f595e48bb03d req-56a85e29-f912-4afd-968a-fc6de9ede17d service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Refreshing instance network info cache due to event network-changed-4d2794aa-7eaf-404a-bf09-16cf3c357511. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1409.662142] env[69992]: DEBUG oslo_concurrency.lockutils [req-467589a3-4048-4b42-ba41-f595e48bb03d req-56a85e29-f912-4afd-968a-fc6de9ede17d service nova] Acquiring lock "refresh_cache-b7af455d-a3a7-480f-b778-9eb3724fa6f1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1409.662270] env[69992]: DEBUG oslo_concurrency.lockutils [req-467589a3-4048-4b42-ba41-f595e48bb03d req-56a85e29-f912-4afd-968a-fc6de9ede17d service nova] Acquired lock "refresh_cache-b7af455d-a3a7-480f-b778-9eb3724fa6f1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1409.662450] env[69992]: DEBUG nova.network.neutron [req-467589a3-4048-4b42-ba41-f595e48bb03d req-56a85e29-f912-4afd-968a-fc6de9ede17d service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Refreshing network info cache for port 4d2794aa-7eaf-404a-bf09-16cf3c357511 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1409.673213] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1409.673213] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5284304d-f4cf-8ce9-c26c-7fa3afa714ac" [ 1409.673213] env[69992]: _type = "HttpNfcLease" [ 1409.673213] env[69992]: } is ready. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1409.673738] env[69992]: DEBUG oslo_vmware.rw_handles [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1409.673738] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5284304d-f4cf-8ce9-c26c-7fa3afa714ac" [ 1409.673738] env[69992]: _type = "HttpNfcLease" [ 1409.673738] env[69992]: }. {{(pid=69992) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1409.674536] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e454515f-9835-48a7-ace3-e416f8c5bcc6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.682928] env[69992]: DEBUG oslo_vmware.rw_handles [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529c0688-5529-27d1-e46b-5a65c8020ca8/disk-0.vmdk from lease info. {{(pid=69992) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1409.683131] env[69992]: DEBUG oslo_vmware.rw_handles [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Creating HTTP connection to write to file with size = 31666176 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529c0688-5529-27d1-e46b-5a65c8020ca8/disk-0.vmdk. 
{{(pid=69992) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1409.740702] env[69992]: DEBUG nova.scheduler.client.report [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1409.749100] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c3c2e7a2-dbcc-41f0-946d-83da46022533 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.906667] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: cc8a809a-1a3b-4dad-a74b-d2f8d267b476] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1410.247214] env[69992]: DEBUG oslo_concurrency.lockutils [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.358s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1410.248534] env[69992]: DEBUG nova.compute.manager [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1410.250265] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.115s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1410.250489] env[69992]: DEBUG nova.objects.instance [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lazy-loading 'resources' on Instance uuid 5c8b5f76-918a-44ac-b5b4-5f5f252da936 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1410.407143] env[69992]: DEBUG nova.network.neutron [req-467589a3-4048-4b42-ba41-f595e48bb03d req-56a85e29-f912-4afd-968a-fc6de9ede17d service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Updated VIF entry in instance network info cache for port 4d2794aa-7eaf-404a-bf09-16cf3c357511. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1410.407534] env[69992]: DEBUG nova.network.neutron [req-467589a3-4048-4b42-ba41-f595e48bb03d req-56a85e29-f912-4afd-968a-fc6de9ede17d service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Updating instance_info_cache with network_info: [{"id": "4d2794aa-7eaf-404a-bf09-16cf3c357511", "address": "fa:16:3e:61:a9:59", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d2794aa-7e", "ovs_interfaceid": "4d2794aa-7eaf-404a-bf09-16cf3c357511", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1410.410035] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 2b89e218-81cc-49fc-a80a-35dde48bdd5d] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1410.754374] env[69992]: DEBUG nova.compute.utils [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1410.758573] env[69992]: DEBUG nova.compute.manager [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Allocating IP information in the background. 
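The "Updating instance_info_cache with network_info: [...]" entries carry the per-port view Nova caches: port id, MAC address, subnets with fixed and floating IPs, MTU, and binding details. Purely to illustrate that shape (the helper name and trimmed sample are ours, not Nova's network model classes), a small summary pass over such a structure could look like:

# A trimmed-down network_info entry in the same shape as the cached JSON above.
network_info = [{
    "id": "4d2794aa-7eaf-404a-bf09-16cf3c357511",
    "address": "fa:16:3e:61:a9:59",
    "network": {
        "label": "tempest-ServerActionsTestOtherB-1689852635-network",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.14", "type": "fixed",
                     "floating_ips": [{"address": "10.180.180.165",
                                       "type": "floating"}]}],
        }],
        "meta": {"mtu": 8950},
    },
    "devname": "tap4d2794aa-7e",
}]

def summarize_vif(vif):
    """Flatten one cached VIF entry into (port id, MAC, fixed IPs, floating IPs)."""
    fixed, floating = [], []
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            fixed.append(ip["address"])
            floating.extend(f["address"] for f in ip.get("floating_ips", []))
    return vif["id"], vif["address"], fixed, floating

if __name__ == "__main__":
    for vif in network_info:
        print(summarize_vif(vif))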
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1410.758757] env[69992]: DEBUG nova.network.neutron [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1410.800180] env[69992]: DEBUG nova.policy [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8589a47b616643f5a513f62354529eda', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57eaf44c4ac5491380b329e1e86e9454', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1410.913375] env[69992]: DEBUG oslo_concurrency.lockutils [req-467589a3-4048-4b42-ba41-f595e48bb03d req-56a85e29-f912-4afd-968a-fc6de9ede17d service nova] Releasing lock "refresh_cache-b7af455d-a3a7-480f-b778-9eb3724fa6f1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1410.914012] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 48558980-2800-4f5b-80ce-d59552445c3f] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1410.943281] env[69992]: DEBUG oslo_vmware.rw_handles [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Completed reading data from the image iterator. {{(pid=69992) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1410.943615] env[69992]: DEBUG oslo_vmware.rw_handles [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529c0688-5529-27d1-e46b-5a65c8020ca8/disk-0.vmdk. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1410.944639] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fac8fcf-f6ce-4097-ae0d-adad6b0aa919 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.952430] env[69992]: DEBUG oslo_vmware.rw_handles [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529c0688-5529-27d1-e46b-5a65c8020ca8/disk-0.vmdk is in state: ready. 
{{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1410.952738] env[69992]: DEBUG oslo_vmware.rw_handles [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529c0688-5529-27d1-e46b-5a65c8020ca8/disk-0.vmdk. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1410.952998] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-e77b15d0-2105-4a18-9692-b0c1fdb09cc2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.997816] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318d6144-6c67-4e48-84f3-37052112f070 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.005835] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a905e6b-6168-4015-8bec-3f926269a68d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.048510] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6686c5b-f8ee-463f-b9f1-6ceae7184743 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.059931] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9199e7b7-354b-4868-8d5f-0f07518d596b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.085137] env[69992]: DEBUG nova.compute.provider_tree [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1411.105515] env[69992]: DEBUG nova.network.neutron [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Successfully created port: e6aa5742-9e1a-4fa9-b7e1-abc39d8f9c65 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1411.162535] env[69992]: DEBUG oslo_vmware.rw_handles [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529c0688-5529-27d1-e46b-5a65c8020ca8/disk-0.vmdk. 
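The import above follows a lease lifecycle: an HttpNfcLease is requested for the vApp import, polled while it is "initializing", the disk URL is read from the lease info once it is "ready", the image bytes are streamed to that URL with periodic HttpNfcLeaseProgress updates, and the lease is released when the write handle closes. The sketch below is only a schematic analogue of that flow with invented classes and helpers; it is not oslo.vmware's rw_handles implementation.

import time

class FakeLease:
    """Schematic stand-in for an import lease: initializing -> ready -> done."""

    def __init__(self, url):
        self.state = "initializing"
        self.url = url
        self._polls = 0

    def poll(self):
        # Pretend the backend needs a couple of polls before the lease is ready.
        self._polls += 1
        if self._polls >= 2:
            self.state = "ready"
        return self.state

    def progress(self, percent):
        print(f"lease progress {percent}%")

    def complete(self):
        self.state = "done"
        print(f"released lease for {self.url}")

def upload_via_lease(lease, chunks, poll_interval=0.1):
    """Wait for the lease, 'stream' chunks to its URL, then complete the lease."""
    while lease.poll() != "ready":
        time.sleep(poll_interval)
    total = sum(len(c) for c in chunks)
    written = 0
    for chunk in chunks:
        written += len(chunk)          # real code would PUT the bytes to lease.url here
        lease.progress(written * 100 // total)
    lease.complete()

if __name__ == "__main__":
    upload_via_lease(FakeLease("https://example.invalid/nfc/disk-0.vmdk"),
                     [b"x" * 1024] * 4)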
{{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1411.162918] env[69992]: INFO nova.virt.vmwareapi.images [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Downloaded image file data 697d1ada-cc80-456a-9a40-098dcf5fc096 [ 1411.164086] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c70f9a-abf1-4168-ae71-c1cabfa35001 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.183135] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-93195dec-e16b-4da3-98e3-d7a7febe584e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.214300] env[69992]: INFO nova.virt.vmwareapi.images [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] The imported VM was unregistered [ 1411.217259] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Caching image {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1411.217522] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Creating directory with path [datastore2] devstack-image-cache_base/697d1ada-cc80-456a-9a40-098dcf5fc096 {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1411.217828] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eeab434d-c1ac-432d-a471-0fd38d72108a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.245672] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Created directory with path [datastore2] devstack-image-cache_base/697d1ada-cc80-456a-9a40-098dcf5fc096 {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1411.245820] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_00dd3969-5fc4-438d-9611-27f894f071ef/OSTACK_IMG_00dd3969-5fc4-438d-9611-27f894f071ef.vmdk to [datastore2] devstack-image-cache_base/697d1ada-cc80-456a-9a40-098dcf5fc096/697d1ada-cc80-456a-9a40-098dcf5fc096.vmdk. 
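The "Preparing fetch location", "Caching image" and "Moving virtual disk ... to devstack-image-cache_base/..." entries describe a download-then-move-into-cache pattern: fetch the image to a scratch location, then move the result into the per-image cache directory so later boots on the same datastore can reuse it. A rough local-filesystem analogue (ours, not the datastore-backed code in the log) might be:

import shutil
import tempfile
from pathlib import Path

def fetch_image_if_missing(cache_root: Path, image_id: str, download) -> Path:
    """Return the cached image path, downloading and moving it in if absent."""
    cached = cache_root / image_id / f"{image_id}.vmdk"
    if cached.exists():
        return cached
    cached.parent.mkdir(parents=True, exist_ok=True)
    # Download to a scratch location first, then move into the cache, so a
    # half-written file never sits at the final cached path.
    with tempfile.TemporaryDirectory() as tmp:
        staging = Path(tmp) / f"OSTACK_IMG_{image_id}.vmdk"
        download(staging)
        shutil.move(str(staging), str(cached))
    return cached

if __name__ == "__main__":
    root = Path(tempfile.mkdtemp()) / "devstack-image-cache_base"
    path = fetch_image_if_missing(
        root, "697d1ada-cc80-456a-9a40-098dcf5fc096",
        download=lambda dst: dst.write_bytes(b"fake vmdk payload"))
    print(path, path.stat().st_size)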
{{(pid=69992) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1411.246136] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-7f6684ed-36f6-43d9-b6c7-45c34924c12b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.252727] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1411.252727] env[69992]: value = "task-2898075" [ 1411.252727] env[69992]: _type = "Task" [ 1411.252727] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.261412] env[69992]: DEBUG nova.compute.manager [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1411.264542] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898075, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.417860] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 45a00234-7ebf-4835-bad3-30474bb27148] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1411.589517] env[69992]: DEBUG nova.scheduler.client.report [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1411.763401] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898075, 'name': MoveVirtualDisk_Task} progress is 9%. 
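The "Inventory has not changed for provider ... based on inventory data" entries list, per resource class, the total, reserved amount and allocation ratio reported to Placement. Roughly, the capacity available for allocations is (total - reserved) * allocation_ratio; the snippet below just evaluates that for the inventory shown in the log (the helper name is ours, not Nova's):

# Inventory data copied from the report entries above.
INVENTORY = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0,
             "min_unit": 1, "max_unit": 16, "step_size": 1},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0,
                  "min_unit": 1, "max_unit": 65530, "step_size": 1},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0,
                "min_unit": 1, "max_unit": 161, "step_size": 1},
}

def allocatable(inv):
    """Capacity usable for allocations: (total - reserved) * allocation_ratio."""
    return (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]

if __name__ == "__main__":
    for rc, inv in INVENTORY.items():
        # e.g. VCPU: (48 - 0) * 4.0 = 192.0
        print(f"{rc}: {allocatable(inv)}")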
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.921677] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 1cf5a6d2-8ec9-429a-9c31-eb3c699389d9] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1412.095325] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.845s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1412.098575] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.521s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1412.099139] env[69992]: DEBUG nova.objects.instance [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lazy-loading 'resources' on Instance uuid d5a6a189-0a7d-49ba-acab-35a244cf76eb {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1412.122766] env[69992]: INFO nova.scheduler.client.report [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Deleted allocations for instance 5c8b5f76-918a-44ac-b5b4-5f5f252da936 [ 1412.264075] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898075, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.273761] env[69992]: DEBUG nova.compute.manager [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1412.306396] env[69992]: DEBUG nova.virt.hardware [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1412.306735] env[69992]: DEBUG nova.virt.hardware [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1412.306929] env[69992]: DEBUG nova.virt.hardware [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1412.307303] env[69992]: DEBUG nova.virt.hardware [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1412.307485] env[69992]: DEBUG nova.virt.hardware [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1412.307658] env[69992]: DEBUG nova.virt.hardware [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1412.307975] env[69992]: DEBUG nova.virt.hardware [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1412.308172] env[69992]: DEBUG nova.virt.hardware [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1412.308352] env[69992]: DEBUG nova.virt.hardware [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 
tempest-ServersTestJSON-1985445483-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1412.308522] env[69992]: DEBUG nova.virt.hardware [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1412.308720] env[69992]: DEBUG nova.virt.hardware [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1412.310242] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03419de-ac47-4ab7-8caf-870bbc1ee160 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.321097] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a8c3a4-0eab-434c-b3c8-02adc35b639a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.425758] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: d7c85c02-d75e-4af0-bd1e-9bbf5a7263d4] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1412.558502] env[69992]: DEBUG nova.compute.manager [req-d94d496e-5f42-4892-ba66-5d25681277aa req-a3b269c3-c4d1-45a5-927a-c93c4d5cca03 service nova] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Received event network-vif-plugged-e6aa5742-9e1a-4fa9-b7e1-abc39d8f9c65 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1412.558801] env[69992]: DEBUG oslo_concurrency.lockutils [req-d94d496e-5f42-4892-ba66-5d25681277aa req-a3b269c3-c4d1-45a5-927a-c93c4d5cca03 service nova] Acquiring lock "904b8020-3060-4611-bdd4-650e288d69fd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1412.558938] env[69992]: DEBUG oslo_concurrency.lockutils [req-d94d496e-5f42-4892-ba66-5d25681277aa req-a3b269c3-c4d1-45a5-927a-c93c4d5cca03 service nova] Lock "904b8020-3060-4611-bdd4-650e288d69fd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1412.559127] env[69992]: DEBUG oslo_concurrency.lockutils [req-d94d496e-5f42-4892-ba66-5d25681277aa req-a3b269c3-c4d1-45a5-927a-c93c4d5cca03 service nova] Lock "904b8020-3060-4611-bdd4-650e288d69fd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1412.559305] env[69992]: DEBUG nova.compute.manager [req-d94d496e-5f42-4892-ba66-5d25681277aa req-a3b269c3-c4d1-45a5-927a-c93c4d5cca03 service nova] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] No waiting events found dispatching network-vif-plugged-e6aa5742-9e1a-4fa9-b7e1-abc39d8f9c65 
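The "Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies" and "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" entries come from Nova's CPU-topology fitting: in outline, it enumerates (sockets, cores, threads) combinations whose product equals the vCPU count within the flavor/image limits, then sorts them by preference. A toy enumerator along those lines (not Nova's _get_possible_cpu_topologies) is:

from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield (sockets, cores, threads) triples whose product equals vcpus."""
    for sockets, cores, threads in product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            yield sockets, cores, threads

if __name__ == "__main__":
    topos = sorted(possible_topologies(1))
    # For the 1-vCPU m1.nano flavor in the log this is just [(1, 1, 1)].
    print(f"Got {len(topos)} possible topologies: {topos}")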
{{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1412.559462] env[69992]: WARNING nova.compute.manager [req-d94d496e-5f42-4892-ba66-5d25681277aa req-a3b269c3-c4d1-45a5-927a-c93c4d5cca03 service nova] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Received unexpected event network-vif-plugged-e6aa5742-9e1a-4fa9-b7e1-abc39d8f9c65 for instance with vm_state building and task_state spawning. [ 1412.607768] env[69992]: DEBUG nova.network.neutron [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Successfully updated port: e6aa5742-9e1a-4fa9-b7e1-abc39d8f9c65 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1412.633523] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b37686a7-393d-459f-b484-d4a2f9e7b9ab tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "5c8b5f76-918a-44ac-b5b4-5f5f252da936" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.618s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1412.767230] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898075, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.827433] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-582db161-76a2-42df-a812-ea47622c6748 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.838057] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb16287-310e-4ebe-80b3-8a55c46a16db {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.881752] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d4bff62-9a25-47a1-a5c0-526f12cb125d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.893227] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da80c3a-e182-4fa0-b6db-0371bd264727 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.915228] env[69992]: DEBUG nova.compute.provider_tree [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1412.930152] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: be28d7a8-6566-45aa-8b4c-08c7eb29864d] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1413.110598] env[69992]: DEBUG oslo_concurrency.lockutils [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c 
tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "refresh_cache-904b8020-3060-4611-bdd4-650e288d69fd" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1413.110831] env[69992]: DEBUG oslo_concurrency.lockutils [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired lock "refresh_cache-904b8020-3060-4611-bdd4-650e288d69fd" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1413.111015] env[69992]: DEBUG nova.network.neutron [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1413.267399] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898075, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.418711] env[69992]: DEBUG nova.scheduler.client.report [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1413.434108] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: ae681491-c03e-486f-b763-0ebfa4dcd669] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1413.644867] env[69992]: DEBUG nova.network.neutron [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Instance cache missing network info. 
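The "No waiting events found dispatching network-vif-plugged-..." and "Received unexpected event ..." pair a little earlier reflects Nova's external-event handshake: the compute manager may register a waiter for a named event such as a port's network-vif-plugged, and an incoming Neutron notification either wakes that waiter or is logged as unexpected. A stdlib-only sketch of that dispatch idea (our own names, not Nova's InstanceEvents) follows.

import threading

class EventWaiter:
    """Map event names to waiters; dispatch wakes a waiter or reports it as unexpected."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}          # event name -> threading.Event

    def prepare(self, name):
        """Register interest in an event before triggering the action that emits it."""
        with self._lock:
            return self._waiters.setdefault(name, threading.Event())

    def dispatch(self, name):
        """Wake the waiter for this event, or report that nobody was waiting."""
        with self._lock:
            ev = self._waiters.pop(name, None)
        if ev is None:
            print(f"Received unexpected event {name}")
            return False
        ev.set()
        return True

if __name__ == "__main__":
    waiters = EventWaiter()
    port_event = "network-vif-plugged-e6aa5742-9e1a-4fa9-b7e1-abc39d8f9c65"
    waiters.dispatch(port_event)            # nobody waiting yet -> unexpected
    ready = waiters.prepare(port_event)     # register interest
    waiters.dispatch(port_event)            # now wakes the waiter
    print("waiter signalled:", ready.is_set())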
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1413.735068] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4d9d0397-a1ed-4863-80f3-d34169ed587c tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "e95e47c2-d82e-4153-8d16-7b65d992e91a" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1413.735068] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4d9d0397-a1ed-4863-80f3-d34169ed587c tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "e95e47c2-d82e-4153-8d16-7b65d992e91a" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1413.767800] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898075, 'name': MoveVirtualDisk_Task} progress is 85%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.811023] env[69992]: DEBUG nova.network.neutron [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Updating instance_info_cache with network_info: [{"id": "e6aa5742-9e1a-4fa9-b7e1-abc39d8f9c65", "address": "fa:16:3e:14:70:86", "network": {"id": "8276eb4a-aa4d-463a-beae-6aab3fe1a5ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-494735806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57eaf44c4ac5491380b329e1e86e9454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6aa5742-9e", "ovs_interfaceid": "e6aa5742-9e1a-4fa9-b7e1-abc39d8f9c65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.923811] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.825s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1413.926533] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 
tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 7.824s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1413.937891] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: c6e4f19b-7264-4eea-a472-f64a68d4df22] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1413.951347] env[69992]: INFO nova.scheduler.client.report [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Deleted allocations for instance d5a6a189-0a7d-49ba-acab-35a244cf76eb [ 1414.238601] env[69992]: INFO nova.compute.manager [None req-4d9d0397-a1ed-4863-80f3-d34169ed587c tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Detaching volume 23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74 [ 1414.267505] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898075, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.274242] env[69992]: INFO nova.virt.block_device [None req-4d9d0397-a1ed-4863-80f3-d34169ed587c tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Attempting to driver detach volume 23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74 from mountpoint /dev/sdb [ 1414.274424] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d9d0397-a1ed-4863-80f3-d34169ed587c tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Volume detach. 
Driver type: vmdk {{(pid=69992) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1414.274585] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d9d0397-a1ed-4863-80f3-d34169ed587c tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582089', 'volume_id': '23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74', 'name': 'volume-23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e95e47c2-d82e-4153-8d16-7b65d992e91a', 'attached_at': '', 'detached_at': '', 'volume_id': '23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74', 'serial': '23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1414.275573] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be071b76-1df6-4605-b58d-1ffc452ad783 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.299018] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e740a50-b295-42df-b309-c9014f18909d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.306821] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab648cb0-328c-4649-a2bd-27e1d297ae81 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.326789] env[69992]: DEBUG oslo_concurrency.lockutils [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Releasing lock "refresh_cache-904b8020-3060-4611-bdd4-650e288d69fd" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1414.327114] env[69992]: DEBUG nova.compute.manager [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Instance network_info: |[{"id": "e6aa5742-9e1a-4fa9-b7e1-abc39d8f9c65", "address": "fa:16:3e:14:70:86", "network": {"id": "8276eb4a-aa4d-463a-beae-6aab3fe1a5ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-494735806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57eaf44c4ac5491380b329e1e86e9454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6aa5742-9e", "ovs_interfaceid": "e6aa5742-9e1a-4fa9-b7e1-abc39d8f9c65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1414.327611] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:70:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73b1ea51-8078-4169-921e-d5a224120ab4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6aa5742-9e1a-4fa9-b7e1-abc39d8f9c65', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1414.334974] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1414.335626] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa6ecd7-895f-4bc5-8300-1e1a1a5704ad {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.338061] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1414.338461] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9dd9d4d7-91ed-43e5-bed6-25570a62edf8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.364711] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d9d0397-a1ed-4863-80f3-d34169ed587c tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] The volume has not been displaced from its original location: [datastore2] volume-23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74/volume-23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74.vmdk. No consolidation needed. {{(pid=69992) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1414.369761] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d9d0397-a1ed-4863-80f3-d34169ed587c tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Reconfiguring VM instance instance-00000057 to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1414.371082] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b712117-24ab-4a03-9578-37fe549aaab2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.383393] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1414.383393] env[69992]: value = "task-2898076" [ 1414.383393] env[69992]: _type = "Task" [ 1414.383393] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.388512] env[69992]: DEBUG oslo_vmware.api [None req-4d9d0397-a1ed-4863-80f3-d34169ed587c tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1414.388512] env[69992]: value = "task-2898077" [ 1414.388512] env[69992]: _type = "Task" [ 1414.388512] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.394046] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898076, 'name': CreateVM_Task} progress is 15%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.399381] env[69992]: DEBUG oslo_vmware.api [None req-4d9d0397-a1ed-4863-80f3-d34169ed587c tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898077, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.432617] env[69992]: INFO nova.compute.claims [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1414.441852] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 1f9f3bdf-c806-4ac9-85f3-6b33b983fafe] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1414.460729] env[69992]: DEBUG oslo_concurrency.lockutils [None req-74696df0-1a52-407a-9d7e-9db7946565b4 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "d5a6a189-0a7d-49ba-acab-35a244cf76eb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.310s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1414.591218] env[69992]: DEBUG nova.compute.manager [req-cde44f81-3524-4add-85c2-c33acf4df0bf req-8e446768-9acb-4f69-8ff5-dbb6806496e0 service nova] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Received event network-changed-e6aa5742-9e1a-4fa9-b7e1-abc39d8f9c65 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1414.591421] env[69992]: DEBUG nova.compute.manager [req-cde44f81-3524-4add-85c2-c33acf4df0bf req-8e446768-9acb-4f69-8ff5-dbb6806496e0 service nova] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Refreshing instance network info cache due to event network-changed-e6aa5742-9e1a-4fa9-b7e1-abc39d8f9c65. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1414.591632] env[69992]: DEBUG oslo_concurrency.lockutils [req-cde44f81-3524-4add-85c2-c33acf4df0bf req-8e446768-9acb-4f69-8ff5-dbb6806496e0 service nova] Acquiring lock "refresh_cache-904b8020-3060-4611-bdd4-650e288d69fd" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.591778] env[69992]: DEBUG oslo_concurrency.lockutils [req-cde44f81-3524-4add-85c2-c33acf4df0bf req-8e446768-9acb-4f69-8ff5-dbb6806496e0 service nova] Acquired lock "refresh_cache-904b8020-3060-4611-bdd4-650e288d69fd" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1414.591951] env[69992]: DEBUG nova.network.neutron [req-cde44f81-3524-4add-85c2-c33acf4df0bf req-8e446768-9acb-4f69-8ff5-dbb6806496e0 service nova] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Refreshing network info cache for port e6aa5742-9e1a-4fa9-b7e1-abc39d8f9c65 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1414.767304] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898075, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.24127} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.767499] env[69992]: INFO nova.virt.vmwareapi.ds_util [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_00dd3969-5fc4-438d-9611-27f894f071ef/OSTACK_IMG_00dd3969-5fc4-438d-9611-27f894f071ef.vmdk to [datastore2] devstack-image-cache_base/697d1ada-cc80-456a-9a40-098dcf5fc096/697d1ada-cc80-456a-9a40-098dcf5fc096.vmdk. [ 1414.767692] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Cleaning up location [datastore2] OSTACK_IMG_00dd3969-5fc4-438d-9611-27f894f071ef {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1414.767860] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_00dd3969-5fc4-438d-9611-27f894f071ef {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1414.768147] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-48393772-1fc7-43ef-a1e1-66fd1fdee4b6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.774782] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1414.774782] env[69992]: value = "task-2898078" [ 1414.774782] env[69992]: _type = "Task" [ 1414.774782] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.784421] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898078, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.895959] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898076, 'name': CreateVM_Task, 'duration_secs': 0.446232} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.896486] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1414.897216] env[69992]: DEBUG oslo_concurrency.lockutils [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.897382] env[69992]: DEBUG oslo_concurrency.lockutils [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1414.897708] env[69992]: DEBUG oslo_concurrency.lockutils [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1414.900585] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e35cefc2-d0b9-4819-b6bf-d94de17a57f3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.902072] env[69992]: DEBUG oslo_vmware.api [None req-4d9d0397-a1ed-4863-80f3-d34169ed587c tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898077, 'name': ReconfigVM_Task, 'duration_secs': 0.327356} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.902319] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d9d0397-a1ed-4863-80f3-d34169ed587c tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Reconfigured VM instance instance-00000057 to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1414.908018] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-874174b1-996a-4373-9d14-a69b0b026248 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.918049] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1414.918049] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52486285-c5e1-bf67-e77c-a898e2f9ecc1" [ 1414.918049] env[69992]: _type = "Task" [ 1414.918049] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.922321] env[69992]: DEBUG oslo_vmware.api [None req-4d9d0397-a1ed-4863-80f3-d34169ed587c tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1414.922321] env[69992]: value = "task-2898079" [ 1414.922321] env[69992]: _type = "Task" [ 1414.922321] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.928183] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52486285-c5e1-bf67-e77c-a898e2f9ecc1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.932802] env[69992]: DEBUG oslo_vmware.api [None req-4d9d0397-a1ed-4863-80f3-d34169ed587c tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898079, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.940466] env[69992]: INFO nova.compute.resource_tracker [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Updating resource usage from migration b98c3a7b-4cb1-4393-bfc7-46c47f19ce94 [ 1414.944435] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: dedba037-48a7-4083-925d-5f34e2a27362] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1415.123635] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88e94ac1-142d-4a77-8210-a280fcafef9e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.132895] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5ab927-d1d7-428d-8b68-ea26930ae9d5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.165028] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13714a0-e2ee-4464-a016-ea0f44575a82 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.172697] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76e8eb6-392d-446b-ba0d-d1b780e9eab6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.186124] env[69992]: DEBUG nova.compute.provider_tree [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1415.287745] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898078, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16859} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.288052] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1415.288209] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/697d1ada-cc80-456a-9a40-098dcf5fc096/697d1ada-cc80-456a-9a40-098dcf5fc096.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1415.288861] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/697d1ada-cc80-456a-9a40-098dcf5fc096/697d1ada-cc80-456a-9a40-098dcf5fc096.vmdk to [datastore2] 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7/25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1415.288861] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2297d10f-f24b-48bc-8d75-bf7782f4bfde {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.295586] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1415.295586] env[69992]: value = "task-2898080" [ 1415.295586] env[69992]: _type = "Task" [ 1415.295586] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.304378] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898080, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.307680] env[69992]: DEBUG nova.network.neutron [req-cde44f81-3524-4add-85c2-c33acf4df0bf req-8e446768-9acb-4f69-8ff5-dbb6806496e0 service nova] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Updated VIF entry in instance network info cache for port e6aa5742-9e1a-4fa9-b7e1-abc39d8f9c65. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1415.308086] env[69992]: DEBUG nova.network.neutron [req-cde44f81-3524-4add-85c2-c33acf4df0bf req-8e446768-9acb-4f69-8ff5-dbb6806496e0 service nova] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Updating instance_info_cache with network_info: [{"id": "e6aa5742-9e1a-4fa9-b7e1-abc39d8f9c65", "address": "fa:16:3e:14:70:86", "network": {"id": "8276eb4a-aa4d-463a-beae-6aab3fe1a5ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-494735806-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57eaf44c4ac5491380b329e1e86e9454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6aa5742-9e", "ovs_interfaceid": "e6aa5742-9e1a-4fa9-b7e1-abc39d8f9c65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1415.431572] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52486285-c5e1-bf67-e77c-a898e2f9ecc1, 'name': SearchDatastore_Task, 'duration_secs': 0.026508} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.434575] env[69992]: DEBUG oslo_concurrency.lockutils [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1415.434808] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1415.435097] env[69992]: DEBUG oslo_concurrency.lockutils [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.435277] env[69992]: DEBUG oslo_concurrency.lockutils [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1415.435461] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1415.435720] env[69992]: DEBUG oslo_vmware.api [None req-4d9d0397-a1ed-4863-80f3-d34169ed587c tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898079, 'name': ReconfigVM_Task, 'duration_secs': 0.150247} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.435908] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-81d2e279-5e22-4717-911d-667e181ed495 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.437587] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d9d0397-a1ed-4863-80f3-d34169ed587c tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582089', 'volume_id': '23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74', 'name': 'volume-23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e95e47c2-d82e-4153-8d16-7b65d992e91a', 'attached_at': '', 'detached_at': '', 'volume_id': '23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74', 'serial': '23b100d9-4f7d-4d3c-9cc3-3e32f4e0fc74'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1415.448375] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1415.448553] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1415.449496] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 31109fbd-ebc0-422d-a705-7d0e59d4bbb4] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1415.451535] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63580339-e24c-4617-92ac-1bdaf6772508 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.457073] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1415.457073] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]523322ef-3bb6-1211-df28-a403925ae974" [ 1415.457073] env[69992]: _type = "Task" [ 1415.457073] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.465586] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523322ef-3bb6-1211-df28-a403925ae974, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.691363] env[69992]: DEBUG nova.scheduler.client.report [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1415.805052] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898080, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.811382] env[69992]: DEBUG oslo_concurrency.lockutils [req-cde44f81-3524-4add-85c2-c33acf4df0bf req-8e446768-9acb-4f69-8ff5-dbb6806496e0 service nova] Releasing lock "refresh_cache-904b8020-3060-4611-bdd4-650e288d69fd" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1415.898625] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "c2268475-6506-4c1f-8f8a-7b8d3a5cb28c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1415.898897] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "c2268475-6506-4c1f-8f8a-7b8d3a5cb28c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1415.955518] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 57702674-4c96-4577-a93f-24ecffebb3a7] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1415.969083] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523322ef-3bb6-1211-df28-a403925ae974, 'name': SearchDatastore_Task, 'duration_secs': 0.021627} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.970080] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c5646e8-d2c0-4193-971b-986c46383ec9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.978677] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1415.978677] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a27092-ede6-a5c4-5508-0cff110da1a3" [ 1415.978677] env[69992]: _type = "Task" [ 1415.978677] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.989013] env[69992]: DEBUG nova.objects.instance [None req-4d9d0397-a1ed-4863-80f3-d34169ed587c tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lazy-loading 'flavor' on Instance uuid e95e47c2-d82e-4153-8d16-7b65d992e91a {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1415.994360] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a27092-ede6-a5c4-5508-0cff110da1a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.197026] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.270s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1416.197335] env[69992]: INFO nova.compute.manager [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Migrating [ 1416.204773] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.697s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1416.205100] env[69992]: DEBUG nova.objects.instance [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lazy-loading 'resources' on Instance uuid fe3624b0-7d4a-4a16-83e3-3f28c2a74006 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1416.307667] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898080, 'name': CopyVirtualDisk_Task} progress is 26%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.402017] env[69992]: DEBUG nova.compute.manager [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1416.463253] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 7fa33d98-20b7-4162-a354-24cfea17701f] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1416.492681] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a27092-ede6-a5c4-5508-0cff110da1a3, 'name': SearchDatastore_Task, 'duration_secs': 0.048434} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.493742] env[69992]: DEBUG oslo_concurrency.lockutils [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1416.494077] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 904b8020-3060-4611-bdd4-650e288d69fd/904b8020-3060-4611-bdd4-650e288d69fd.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1416.494374] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-327c938d-1f09-4794-a623-0b892e63ce04 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.506542] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1416.506542] env[69992]: value = "task-2898081" [ 1416.506542] env[69992]: _type = "Task" [ 1416.506542] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.515596] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898081, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.718617] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "refresh_cache-d50d7460-2b70-45bc-940f-7d45f329fa1c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.718774] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired lock "refresh_cache-d50d7460-2b70-45bc-940f-7d45f329fa1c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1416.718947] env[69992]: DEBUG nova.network.neutron [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1416.811447] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898080, 'name': CopyVirtualDisk_Task} progress is 49%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.920635] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1416.944184] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e00bf647-51c8-466a-9df3-5fb59051f4d3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.955917] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b177c28-3f42-4be7-bb9a-258c2e4cb620 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.989922] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 9591b360-414b-4aa9-94b2-5b9ccb9e7d39] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1416.993067] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8b6b87-bc20-4a3a-9258-abd667f03906 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.001531] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4d9d0397-a1ed-4863-80f3-d34169ed587c tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "e95e47c2-d82e-4153-8d16-7b65d992e91a" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.266s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1417.003651] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9906714-303e-464c-a2e9-5e86c087dca8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.023741] env[69992]: DEBUG nova.compute.provider_tree [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1417.028418] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898081, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.312081] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898080, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.495227] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 953c0e0d-3279-444c-b631-6ebbf24e5487] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1417.519842] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898081, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.530059] env[69992]: DEBUG nova.scheduler.client.report [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1417.563596] env[69992]: DEBUG nova.network.neutron [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Updating instance_info_cache with network_info: [{"id": "0b3a8c02-b431-4538-b679-fba08b7e9e8e", "address": "fa:16:3e:a1:2b:47", "network": {"id": "ef950ccf-7246-4fba-b1d2-5829e51d7f7d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093076994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b8716c4b7324052a3472734c655655a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b3a8c02-b4", "ovs_interfaceid": "0b3a8c02-b431-4538-b679-fba08b7e9e8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1417.811301] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898080, 'name': CopyVirtualDisk_Task} progress is 91%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.970619] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1417.970886] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1417.971143] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1417.971366] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1417.971549] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1417.974043] env[69992]: INFO nova.compute.manager [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Terminating instance [ 1417.997210] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: a9bf63c6-8a26-4d00-b8ff-0ea4e29a1190] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1418.019711] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898081, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.034707] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.830s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1418.036820] env[69992]: DEBUG oslo_concurrency.lockutils [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.441s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1418.037056] env[69992]: DEBUG nova.objects.instance [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Lazy-loading 'resources' on Instance uuid fc769b20-222e-4ff0-8ffd-7b24e4658b14 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1418.052304] env[69992]: DEBUG oslo_concurrency.lockutils [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "e95e47c2-d82e-4153-8d16-7b65d992e91a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1418.052556] env[69992]: DEBUG oslo_concurrency.lockutils [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "e95e47c2-d82e-4153-8d16-7b65d992e91a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1418.052720] env[69992]: DEBUG oslo_concurrency.lockutils [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "e95e47c2-d82e-4153-8d16-7b65d992e91a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1418.052938] env[69992]: DEBUG oslo_concurrency.lockutils [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "e95e47c2-d82e-4153-8d16-7b65d992e91a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1418.053139] env[69992]: DEBUG oslo_concurrency.lockutils [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "e95e47c2-d82e-4153-8d16-7b65d992e91a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1418.055055] env[69992]: INFO nova.compute.manager [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Terminating instance [ 1418.056963] env[69992]: INFO nova.scheduler.client.report [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Deleted allocations for instance fe3624b0-7d4a-4a16-83e3-3f28c2a74006 [ 1418.066578] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Releasing lock "refresh_cache-d50d7460-2b70-45bc-940f-7d45f329fa1c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1418.310856] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898080, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.800531} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.311159] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/697d1ada-cc80-456a-9a40-098dcf5fc096/697d1ada-cc80-456a-9a40-098dcf5fc096.vmdk to [datastore2] 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7/25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1418.311996] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69ed4f64-13c0-4537-a0ee-efc74e57255b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.336428] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7/25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7.vmdk or device None with type streamOptimized {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1418.336883] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad55715e-e332-454e-b999-a70fdb2b93e1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.358090] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1418.358090] env[69992]: value = "task-2898082" [ 1418.358090] env[69992]: _type = "Task" [ 1418.358090] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.368016] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898082, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.478135] env[69992]: DEBUG nova.compute.manager [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1418.478534] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1418.479985] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e7c84e-59aa-4844-8092-530d7526960a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.489544] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1418.489867] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-23c490d4-d30e-4a8f-8fd5-b25605acff15 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.497199] env[69992]: DEBUG oslo_vmware.api [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1418.497199] env[69992]: value = "task-2898083" [ 1418.497199] env[69992]: _type = "Task" [ 1418.497199] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.500868] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 451a8af1-a4a2-4c2d-932c-58955491433b] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1418.507679] env[69992]: DEBUG oslo_vmware.api [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2898083, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.518067] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898081, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.936745} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.518332] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 904b8020-3060-4611-bdd4-650e288d69fd/904b8020-3060-4611-bdd4-650e288d69fd.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1418.518570] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1418.518825] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e21f5a9b-15a8-45e1-9588-2ae28c6a4d02 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.524820] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1418.524820] env[69992]: value = "task-2898084" [ 1418.524820] env[69992]: _type = "Task" [ 1418.524820] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.533076] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898084, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.565518] env[69992]: DEBUG nova.compute.manager [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1418.565518] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1418.567021] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3df144f2-6f1a-4db3-8e79-4aa2ac1135bd tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "fe3624b0-7d4a-4a16-83e3-3f28c2a74006" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.474s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1418.568793] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc2178c6-d276-4f5c-956a-a00cb48ae1cf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.585075] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1418.585400] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb3b9236-82df-4050-a5cf-c796406da1d0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.594732] env[69992]: DEBUG oslo_vmware.api [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1418.594732] env[69992]: value = "task-2898085" [ 1418.594732] env[69992]: _type = "Task" [ 1418.594732] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.606283] env[69992]: DEBUG oslo_vmware.api [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898085, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.723012] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d361af-ece6-4803-8b3e-fab3b3951730 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.730819] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5634b8-4aae-45ea-a85e-c01fc784f0c0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.766107] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c759c14-b978-4768-b964-e943f27fccb9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.773277] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa85649e-c94b-40b9-a01e-43121a26d0a5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.786228] env[69992]: DEBUG nova.compute.provider_tree [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1418.860604] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "6ccc70f5-4857-4af3-99a1-f60ec35aebaf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1418.860981] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "6ccc70f5-4857-4af3-99a1-f60ec35aebaf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1418.861224] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "6ccc70f5-4857-4af3-99a1-f60ec35aebaf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1418.861412] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "6ccc70f5-4857-4af3-99a1-f60ec35aebaf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1418.861579] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f44ed15c-2fee-4413-a918-e3e53304c26e 
tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "6ccc70f5-4857-4af3-99a1-f60ec35aebaf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1418.863728] env[69992]: INFO nova.compute.manager [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Terminating instance [ 1418.874572] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898082, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.006746] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 033d667f-5511-4254-a7e2-f8a2a94178d1] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1419.009033] env[69992]: DEBUG oslo_vmware.api [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2898083, 'name': PowerOffVM_Task, 'duration_secs': 0.276246} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.009438] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1419.009615] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1419.009856] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-746b3507-9831-4836-a10d-a19b77c047de {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.035296] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898084, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069691} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.035529] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1419.036252] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786b3e5f-e6b7-49b2-be05-25cf714fd6f0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.057224] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] 904b8020-3060-4611-bdd4-650e288d69fd/904b8020-3060-4611-bdd4-650e288d69fd.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1419.057451] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f8d2c16-1fea-47dc-bb82-0d43601cdf99 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.076693] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1419.076693] env[69992]: value = "task-2898087" [ 1419.076693] env[69992]: _type = "Task" [ 1419.076693] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.086896] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898087, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.103864] env[69992]: DEBUG oslo_vmware.api [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898085, 'name': PowerOffVM_Task, 'duration_secs': 0.242518} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.104132] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1419.104303] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1419.104548] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-effc877b-e446-4d20-93ad-4e43c38c4b4b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.180138] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1419.180380] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1419.180560] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Deleting the datastore file [datastore1] 08869f38-9609-4f7f-9110-2f26fd1cb3f7 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1419.180834] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6fa73b81-966c-4e7e-9b70-a1502885967c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.187438] env[69992]: DEBUG oslo_vmware.api [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for the task: (returnval){ [ 1419.187438] env[69992]: value = "task-2898089" [ 1419.187438] env[69992]: _type = "Task" [ 1419.187438] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.196524] env[69992]: DEBUG oslo_vmware.api [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2898089, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.197893] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1419.198155] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1419.198381] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Deleting the datastore file [datastore2] e95e47c2-d82e-4153-8d16-7b65d992e91a {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1419.198695] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a559c0b-2d73-40cf-a51e-a02925670f97 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.208049] env[69992]: DEBUG oslo_vmware.api [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1419.208049] env[69992]: value = "task-2898090" [ 1419.208049] env[69992]: _type = "Task" [ 1419.208049] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.216124] env[69992]: DEBUG oslo_vmware.api [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898090, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.290396] env[69992]: DEBUG nova.scheduler.client.report [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1419.371064] env[69992]: DEBUG nova.compute.manager [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1419.371341] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1419.372440] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b188059-1c11-4ebf-ab10-0c780a7cf863 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.379565] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898082, 'name': ReconfigVM_Task, 'duration_secs': 0.558193} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.380307] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Reconfigured VM instance instance-0000005a to attach disk [datastore2] 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7/25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7.vmdk or device None with type streamOptimized {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1419.380944] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-55cc9a34-8e5e-4ba6-a28b-22f9986a4fd8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.384625] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1419.385186] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2d56a80a-1958-4fb8-9d14-e4c0bdc3699f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.388430] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1419.388430] env[69992]: value = "task-2898091" [ 1419.388430] env[69992]: _type = "Task" [ 1419.388430] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.393022] env[69992]: DEBUG oslo_vmware.api [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1419.393022] env[69992]: value = "task-2898092" [ 1419.393022] env[69992]: _type = "Task" [ 1419.393022] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.400047] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898091, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.406987] env[69992]: DEBUG oslo_vmware.api [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898092, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.510062] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: a35dd590-b5ff-4878-8aa5-8797814d8779] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1419.590072] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898087, 'name': ReconfigVM_Task, 'duration_secs': 0.305564} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.591387] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Reconfigured VM instance instance-00000070 to attach disk [datastore2] 904b8020-3060-4611-bdd4-650e288d69fd/904b8020-3060-4611-bdd4-650e288d69fd.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1419.592711] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cbef66a-e744-4f9c-aa97-9a382d06a51b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.596648] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5dba491c-f1be-4245-ba4c-d952d68f7747 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.627583] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Updating instance 'd50d7460-2b70-45bc-940f-7d45f329fa1c' progress to 0 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1419.633742] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1419.633742] env[69992]: value = "task-2898093" [ 1419.633742] env[69992]: _type = "Task" [ 1419.633742] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.641737] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898093, 'name': Rename_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.702954] env[69992]: DEBUG oslo_vmware.api [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Task: {'id': task-2898089, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157986} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.703251] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1419.703488] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1419.703667] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1419.703842] env[69992]: INFO nova.compute.manager [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1419.704107] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1419.704302] env[69992]: DEBUG nova.compute.manager [-] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1419.704397] env[69992]: DEBUG nova.network.neutron [-] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1419.716752] env[69992]: DEBUG oslo_vmware.api [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898090, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170721} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.716980] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1419.717177] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1419.717354] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1419.717522] env[69992]: INFO nova.compute.manager [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1419.717753] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1419.717961] env[69992]: DEBUG nova.compute.manager [-] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1419.718041] env[69992]: DEBUG nova.network.neutron [-] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1419.796120] env[69992]: DEBUG oslo_concurrency.lockutils [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.759s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1419.798561] env[69992]: DEBUG oslo_concurrency.lockutils [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.656s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1419.798561] env[69992]: DEBUG nova.objects.instance [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Lazy-loading 'resources' on Instance uuid e9018928-5237-4ba1-8c18-9ff1ec64a79c {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1419.820369] env[69992]: INFO nova.scheduler.client.report [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Deleted allocations for instance fc769b20-222e-4ff0-8ffd-7b24e4658b14 [ 1419.900267] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898091, 'name': Rename_Task, 'duration_secs': 0.335736} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.900497] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1419.900751] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-14e1accd-986e-4ad1-b9f3-a2eb621b81f0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.904874] env[69992]: DEBUG oslo_vmware.api [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898092, 'name': PowerOffVM_Task, 'duration_secs': 0.344176} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.905499] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1419.905692] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1419.905918] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3e9180a8-8de9-4552-a2cf-2d8746f430df {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.910320] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1419.910320] env[69992]: value = "task-2898094" [ 1419.910320] env[69992]: _type = "Task" [ 1419.910320] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.919681] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898094, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.973177] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1419.973177] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1419.973325] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Deleting the datastore file [datastore1] 6ccc70f5-4857-4af3-99a1-f60ec35aebaf {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1419.973483] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd342ea3-3924-4b55-9344-f3b896ff1963 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.979820] env[69992]: DEBUG oslo_vmware.api [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for the task: (returnval){ [ 1419.979820] env[69992]: value = "task-2898096" [ 1419.979820] env[69992]: _type = "Task" [ 1419.979820] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.988338] env[69992]: DEBUG oslo_vmware.api [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898096, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.013587] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 408de352-797c-40c2-86bc-359e01c5c04e] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1420.137409] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1420.137781] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1336b7ab-7dd2-466d-a59f-6daf73dde092 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.147852] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898093, 'name': Rename_Task, 'duration_secs': 0.343028} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.149182] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1420.149573] env[69992]: DEBUG oslo_vmware.api [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1420.149573] env[69992]: value = "task-2898097" [ 1420.149573] env[69992]: _type = "Task" [ 1420.149573] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.149767] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a35b47e8-cb7d-4920-a62c-068720f43c15 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.160049] env[69992]: DEBUG oslo_vmware.api [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898097, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.161705] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1420.161705] env[69992]: value = "task-2898098" [ 1420.161705] env[69992]: _type = "Task" [ 1420.161705] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.169574] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898098, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.328461] env[69992]: DEBUG oslo_concurrency.lockutils [None req-42245632-151c-4d3e-b220-9987ac002b0d tempest-ServerAddressesTestJSON-1626327335 tempest-ServerAddressesTestJSON-1626327335-project-member] Lock "fc769b20-222e-4ff0-8ffd-7b24e4658b14" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.262s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1420.425926] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898094, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.493569] env[69992]: DEBUG oslo_vmware.api [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Task: {'id': task-2898096, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159156} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.496383] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1420.496615] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1420.496784] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1420.496970] env[69992]: INFO nova.compute.manager [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1420.497277] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1420.497719] env[69992]: DEBUG nova.compute.manager [-] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1420.497816] env[69992]: DEBUG nova.network.neutron [-] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1420.516455] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 5f98a2aa-eb7b-41d2-9e9f-14cee9445942] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1420.522138] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9408518f-ec60-47a6-a100-6652d7478b8d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.529741] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c3e9a8d-069d-4fbf-82d9-69d244331862 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.563782] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58bae3c5-8a17-4ab4-a36e-ad9e6c9e2eb8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.568682] env[69992]: DEBUG nova.compute.manager [req-846cf8ef-0bcf-4460-a9dc-913e9e111359 req-5dfc9a6c-ee48-4e2b-8a1e-292fdf5b8748 service nova] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Received event network-vif-deleted-e6b569b5-e098-47d5-80c8-8c95df681396 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1420.568682] env[69992]: INFO nova.compute.manager [req-846cf8ef-0bcf-4460-a9dc-913e9e111359 req-5dfc9a6c-ee48-4e2b-8a1e-292fdf5b8748 service nova] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Neutron deleted interface e6b569b5-e098-47d5-80c8-8c95df681396; detaching it from the instance and deleting it from the info cache [ 1420.568682] env[69992]: DEBUG nova.network.neutron [req-846cf8ef-0bcf-4460-a9dc-913e9e111359 req-5dfc9a6c-ee48-4e2b-8a1e-292fdf5b8748 service nova] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1420.577178] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dcf158c-d6f0-417e-be78-9339806f9f1f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.592748] env[69992]: DEBUG nova.compute.provider_tree [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1420.660641] env[69992]: DEBUG oslo_vmware.api [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': 
task-2898097, 'name': PowerOffVM_Task, 'duration_secs': 0.197144} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.661199] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1420.661535] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Updating instance 'd50d7460-2b70-45bc-940f-7d45f329fa1c' progress to 17 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1420.674428] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898098, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.933058] env[69992]: DEBUG oslo_vmware.api [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898094, 'name': PowerOnVM_Task, 'duration_secs': 0.52677} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.933598] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1420.951057] env[69992]: DEBUG nova.network.neutron [-] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1420.955838] env[69992]: DEBUG nova.network.neutron [-] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1421.020598] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 37751af7-267e-4693-aaa3-cd1bb9c3d950] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1421.062482] env[69992]: DEBUG nova.compute.manager [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1421.063453] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63f033d-f703-4b5c-ad9b-e0fec27e0fbd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.076646] env[69992]: DEBUG oslo_vmware.service [-] Invoking 
SearchIndex.FindAllByUuid with opID=oslo.vmware-250fd871-b8ea-4c7d-b038-70dfafce7ed1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.083141] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f239b941-2064-47aa-b000-77f5b959f97a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.097032] env[69992]: DEBUG nova.scheduler.client.report [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1421.122032] env[69992]: DEBUG nova.compute.manager [req-846cf8ef-0bcf-4460-a9dc-913e9e111359 req-5dfc9a6c-ee48-4e2b-8a1e-292fdf5b8748 service nova] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Detach interface failed, port_id=e6b569b5-e098-47d5-80c8-8c95df681396, reason: Instance e95e47c2-d82e-4153-8d16-7b65d992e91a could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1421.170995] env[69992]: DEBUG nova.virt.hardware [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1421.171750] env[69992]: DEBUG nova.virt.hardware [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1421.171932] env[69992]: DEBUG nova.virt.hardware [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1421.172145] env[69992]: DEBUG nova.virt.hardware [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1421.172299] env[69992]: DEBUG nova.virt.hardware [None 
req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1421.172448] env[69992]: DEBUG nova.virt.hardware [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1421.173686] env[69992]: DEBUG nova.virt.hardware [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1421.173938] env[69992]: DEBUG nova.virt.hardware [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1421.174167] env[69992]: DEBUG nova.virt.hardware [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1421.175022] env[69992]: DEBUG nova.virt.hardware [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1421.175022] env[69992]: DEBUG nova.virt.hardware [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1421.179943] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34b2f854-0913-4378-bd32-267d01a06188 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.193361] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898098, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.198541] env[69992]: DEBUG oslo_vmware.api [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1421.198541] env[69992]: value = "task-2898099" [ 1421.198541] env[69992]: _type = "Task" [ 1421.198541] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.211251] env[69992]: DEBUG oslo_vmware.api [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898099, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.457722] env[69992]: INFO nova.compute.manager [-] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Took 1.74 seconds to deallocate network for instance. [ 1421.458134] env[69992]: INFO nova.compute.manager [-] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Took 1.75 seconds to deallocate network for instance. [ 1421.527041] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: c1c90aa6-922d-4315-8ead-2263a55a5d6e] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1421.583142] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6eb3ed23-06e1-496a-9118-e8cb82f52e18 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 24.679s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1421.602039] env[69992]: DEBUG oslo_concurrency.lockutils [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.804s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1421.605305] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.685s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1421.607577] env[69992]: INFO nova.compute.claims [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1421.624482] env[69992]: DEBUG nova.network.neutron [-] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1421.627737] env[69992]: INFO nova.scheduler.client.report [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Deleted allocations for instance e9018928-5237-4ba1-8c18-9ff1ec64a79c [ 1421.676267] env[69992]: DEBUG oslo_vmware.api [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898098, 'name': PowerOnVM_Task, 'duration_secs': 1.312253} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.676551] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1421.676752] env[69992]: INFO nova.compute.manager [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Took 9.40 seconds to spawn the instance on the hypervisor. [ 1421.676928] env[69992]: DEBUG nova.compute.manager [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1421.677757] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf1ccff-9958-4f75-92e2-f97bbb4f6804 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.710077] env[69992]: DEBUG oslo_vmware.api [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898099, 'name': ReconfigVM_Task, 'duration_secs': 0.295783} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.710391] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Updating instance 'd50d7460-2b70-45bc-940f-7d45f329fa1c' progress to 33 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1421.968085] env[69992]: DEBUG oslo_concurrency.lockutils [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1421.968951] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1422.030282] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 4e93b655-aaf4-49b8-bbb2-92287ec15bbc] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1422.132223] env[69992]: INFO nova.compute.manager [-] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Took 1.63 seconds to deallocate network for instance. 
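The task records above follow one pattern: a *_Task method is invoked against vCenter, a task moref comes back immediately, and oslo.vmware polls TaskInfo until the "completed successfully" line is logged. Below is a minimal sketch of that pattern against the public oslo.vmware API, not the Nova driver's actual code path (which reaches the same calls through nova.virt.vmwareapi.vm_util and the driver's session wrapper); the vCenter host, credentials and poll interval are placeholders, and the instance UUID is copied from the records above purely for illustration.

    # Minimal sketch (placeholder host/credentials, assumed values) of the
    # invoke-then-poll pattern recorded above: a *_Task call returns a task
    # moref at once, and wait_for_task() polls TaskInfo until success/error.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc1.example.test',        # vCenter host -- placeholder
        'svc-user', 'secret',      # credentials -- placeholders
        api_retry_count=10,
        task_poll_interval=0.5)    # drives the periodic "progress is N%" polls

    # Look the VM up by instance UUID, as the SearchIndex.FindAllByUuid
    # invocations in the log do; assumes the VM exists (list is non-empty).
    vm_refs = session.invoke_api(
        session.vim, 'FindAllByUuid',
        session.vim.service_content.searchIndex,
        uuid='d50d7460-2b70-45bc-940f-7d45f329fa1c',  # UUID taken from the log
        vmSearch=True, instanceUuid=True)

    # *_Task methods return a task moref immediately ...
    task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_refs[0])

    # ... and wait_for_task() polls TaskInfo (the "_poll_task" lines) until
    # the task reports success, returning the final TaskInfo object.
    task_info = session.wait_for_task(task_ref)
    print(task_info.state)

The same polling loop underlies the PowerOnVM_Task, ReconfigVM_Task and SuspendVM_Task entries that follow; only the invoked method and its arguments differ.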
[ 1422.144776] env[69992]: DEBUG oslo_concurrency.lockutils [None req-664357cf-3cf7-48d0-9cce-9d6f7e2097fc tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Lock "e9018928-5237-4ba1-8c18-9ff1ec64a79c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.140s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1422.195088] env[69992]: INFO nova.compute.manager [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Took 19.10 seconds to build instance. [ 1422.222372] env[69992]: DEBUG nova.virt.hardware [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1422.222752] env[69992]: DEBUG nova.virt.hardware [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1422.222935] env[69992]: DEBUG nova.virt.hardware [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1422.223157] env[69992]: DEBUG nova.virt.hardware [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1422.223277] env[69992]: DEBUG nova.virt.hardware [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1422.224115] env[69992]: DEBUG nova.virt.hardware [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1422.224115] env[69992]: DEBUG nova.virt.hardware [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1422.224115] env[69992]: DEBUG nova.virt.hardware [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1422.224690] env[69992]: DEBUG nova.virt.hardware [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1422.225062] env[69992]: DEBUG nova.virt.hardware [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1422.225279] env[69992]: DEBUG nova.virt.hardware [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1422.230785] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Reconfiguring VM instance instance-0000006e to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1422.231425] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f138131-d214-4858-b6a4-92f5cf33a46c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.254621] env[69992]: DEBUG oslo_vmware.api [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1422.254621] env[69992]: value = "task-2898100" [ 1422.254621] env[69992]: _type = "Task" [ 1422.254621] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.266452] env[69992]: DEBUG oslo_vmware.api [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898100, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.534700] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: bce01d14-3c1b-4dce-b61c-721e25a56497] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1422.594980] env[69992]: DEBUG nova.compute.manager [req-2c799262-aa3d-4916-b610-170ba30c8758 req-c6806426-0540-4ae8-880c-3043be4ec840 service nova] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Received event network-vif-deleted-bb64cf0b-3b8e-4225-ba71-1524625e60a7 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1422.594980] env[69992]: DEBUG nova.compute.manager [req-2c799262-aa3d-4916-b610-170ba30c8758 req-c6806426-0540-4ae8-880c-3043be4ec840 service nova] [instance: 6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Received event network-vif-deleted-1b550e88-755a-45a1-98fd-6fcb8fa4a7a8 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1422.645159] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1422.697407] env[69992]: DEBUG oslo_concurrency.lockutils [None req-24695bbb-be94-4f48-aef8-0e87ebad6b1c tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "904b8020-3060-4611-bdd4-650e288d69fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.618s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1422.766516] env[69992]: DEBUG oslo_vmware.api [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898100, 'name': ReconfigVM_Task, 'duration_secs': 0.179369} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.766516] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Reconfigured VM instance instance-0000006e to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1422.767171] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ccf512-abd1-4614-a015-815f18fe208b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.796655] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] d50d7460-2b70-45bc-940f-7d45f329fa1c/d50d7460-2b70-45bc-940f-7d45f329fa1c.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1422.801730] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2c06bda-3090-4471-ab38-f096075a81c3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.819746] env[69992]: DEBUG oslo_vmware.api [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1422.819746] env[69992]: value = "task-2898101" [ 1422.819746] env[69992]: _type = "Task" [ 1422.819746] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.825798] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f07827-fd52-4320-9b02-d67b27623043 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.831997] env[69992]: DEBUG oslo_vmware.api [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898101, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.837297] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20eb429e-5f25-402c-95c2-c8ed6b1e0aa1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.873672] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b63aa63-3c8f-4825-9cf9-74335e139301 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.881348] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f388afe1-8168-4a4a-9cdf-7961f2866910 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.895563] env[69992]: DEBUG nova.compute.provider_tree [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1423.037615] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 4609d6ce-9d5b-408d-8cb6-1baf76d85bb3] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1423.332071] env[69992]: DEBUG oslo_vmware.api [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898101, 'name': ReconfigVM_Task, 'duration_secs': 0.384251} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.332339] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Reconfigured VM instance instance-0000006e to attach disk [datastore2] d50d7460-2b70-45bc-940f-7d45f329fa1c/d50d7460-2b70-45bc-940f-7d45f329fa1c.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1423.332886] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Updating instance 'd50d7460-2b70-45bc-940f-7d45f329fa1c' progress to 50 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1423.399837] env[69992]: DEBUG nova.scheduler.client.report [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1423.541043] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: f2ac32d7-d32b-497a-a262-ab1cd95f87d0] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1423.839879] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a9c653-b3d1-4f09-a5ce-4c57f168f6b1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.860639] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48eaae23-cd8b-4d3b-ba75-11eaaf59d3a7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.865635] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8234b9d1-a677-4bd0-9925-dbaa18445dfd tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "904b8020-3060-4611-bdd4-650e288d69fd" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1423.865635] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8234b9d1-a677-4bd0-9925-dbaa18445dfd tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "904b8020-3060-4611-bdd4-650e288d69fd" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1423.865635] env[69992]: DEBUG 
nova.compute.manager [None req-8234b9d1-a677-4bd0-9925-dbaa18445dfd tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1423.879539] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25a1799-9067-4b80-a6ed-50ad78f1f57b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.882735] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Updating instance 'd50d7460-2b70-45bc-940f-7d45f329fa1c' progress to 67 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1423.889660] env[69992]: DEBUG nova.compute.manager [None req-8234b9d1-a677-4bd0-9925-dbaa18445dfd tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69992) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1423.889977] env[69992]: DEBUG nova.objects.instance [None req-8234b9d1-a677-4bd0-9925-dbaa18445dfd tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lazy-loading 'flavor' on Instance uuid 904b8020-3060-4611-bdd4-650e288d69fd {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1423.904147] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.299s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1423.904606] env[69992]: DEBUG nova.compute.manager [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1423.909958] env[69992]: DEBUG oslo_concurrency.lockutils [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.942s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1423.910421] env[69992]: DEBUG nova.objects.instance [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lazy-loading 'resources' on Instance uuid e95e47c2-d82e-4153-8d16-7b65d992e91a {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1423.911996] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47254b88-da89-4f69-88df-44a09e366360 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.919433] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1aed9dd5-be6a-4cda-8f5c-fe2a082355c2 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Suspending the VM {{(pid=69992) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1423.919608] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-602062d1-95d0-4e16-b5dc-f2b4f57c898f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.925800] env[69992]: DEBUG oslo_vmware.api [None req-1aed9dd5-be6a-4cda-8f5c-fe2a082355c2 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1423.925800] env[69992]: value = "task-2898102" [ 1423.925800] env[69992]: _type = "Task" [ 1423.925800] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.933877] env[69992]: DEBUG oslo_vmware.api [None req-1aed9dd5-be6a-4cda-8f5c-fe2a082355c2 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898102, 'name': SuspendVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.044426] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: f64108ec-c3b2-4b11-9085-2c56b0de93f5] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1424.410576] env[69992]: DEBUG nova.compute.utils [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1424.412682] env[69992]: DEBUG nova.compute.manager [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1424.412682] env[69992]: DEBUG nova.network.neutron [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1424.436829] env[69992]: DEBUG oslo_vmware.api [None req-1aed9dd5-be6a-4cda-8f5c-fe2a082355c2 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898102, 'name': SuspendVM_Task} progress is 37%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.442647] env[69992]: DEBUG nova.network.neutron [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Port 0b3a8c02-b431-4538-b679-fba08b7e9e8e binding to destination host cpu-1 is already ACTIVE {{(pid=69992) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1424.467622] env[69992]: DEBUG nova.policy [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ede57670ddc4434a9ba4745870ddfa14', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53308426a9c44f46b78a155e612ee5a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1424.476769] env[69992]: DEBUG oslo_concurrency.lockutils [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquiring lock "9d290fe7-12d2-416e-9608-7a8e7e9b2f65" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1424.476769] env[69992]: DEBUG oslo_concurrency.lockutils [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Lock "9d290fe7-12d2-416e-9608-7a8e7e9b2f65" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1424.548031] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 546fb923-4574-4407-8625-69e6c4d8d35e] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1424.635565] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f31210c-8e9a-4504-8e36-a3e66fc127cf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.649930] env[69992]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3055310c-1731-4f51-9a5b-42736c2ae0d3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.684656] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-227476ea-99e0-4ff9-892e-ed42eae99ab1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.694055] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f456ad4a-7913-4b30-be6f-cf7fc72e5296 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.707373] env[69992]: DEBUG nova.compute.provider_tree [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1424.777873] env[69992]: DEBUG nova.network.neutron [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Successfully created port: 2938c230-8b81-4b8d-96c2-6cf38f5d97af {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1424.895874] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8234b9d1-a677-4bd0-9925-dbaa18445dfd tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1424.896329] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1878f0b-7a40-425d-b23e-71b623160513 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.905234] env[69992]: DEBUG oslo_vmware.api [None req-8234b9d1-a677-4bd0-9925-dbaa18445dfd tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1424.905234] env[69992]: value = "task-2898103" [ 1424.905234] env[69992]: _type = "Task" [ 1424.905234] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.912649] env[69992]: DEBUG oslo_vmware.api [None req-8234b9d1-a677-4bd0-9925-dbaa18445dfd tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898103, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.915323] env[69992]: DEBUG nova.compute.manager [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1424.935495] env[69992]: DEBUG oslo_vmware.api [None req-1aed9dd5-be6a-4cda-8f5c-fe2a082355c2 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898102, 'name': SuspendVM_Task} progress is 37%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.978689] env[69992]: DEBUG nova.compute.manager [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1425.053408] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 9df7b187-e579-41b0-9d24-be2a1ae93079] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1425.211098] env[69992]: DEBUG nova.scheduler.client.report [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1425.415685] env[69992]: DEBUG oslo_vmware.api [None req-8234b9d1-a677-4bd0-9925-dbaa18445dfd tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898103, 'name': PowerOffVM_Task, 'duration_secs': 0.450639} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.415685] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8234b9d1-a677-4bd0-9925-dbaa18445dfd tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1425.415857] env[69992]: DEBUG nova.compute.manager [None req-8234b9d1-a677-4bd0-9925-dbaa18445dfd tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1425.417028] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4cf9451-bb56-491b-9cbe-6fc40593f200 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.440072] env[69992]: DEBUG oslo_vmware.api [None req-1aed9dd5-be6a-4cda-8f5c-fe2a082355c2 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898102, 'name': SuspendVM_Task, 'duration_secs': 1.421587} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.440072] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1aed9dd5-be6a-4cda-8f5c-fe2a082355c2 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Suspended the VM {{(pid=69992) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1425.440072] env[69992]: DEBUG nova.compute.manager [None req-1aed9dd5-be6a-4cda-8f5c-fe2a082355c2 tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1425.440467] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b11441-404f-46d2-b4eb-034a918d2091 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.465144] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "d50d7460-2b70-45bc-940f-7d45f329fa1c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1425.465144] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "d50d7460-2b70-45bc-940f-7d45f329fa1c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1425.465144] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "d50d7460-2b70-45bc-940f-7d45f329fa1c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1425.502678] env[69992]: DEBUG oslo_concurrency.lockutils [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1425.556228] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 32bdb15d-6a4d-4445-9b82-d18b0f6743b6] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1425.715560] env[69992]: DEBUG oslo_concurrency.lockutils [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.805s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1425.718460] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.749s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1425.718460] env[69992]: DEBUG nova.objects.instance [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lazy-loading 'resources' on Instance uuid 08869f38-9609-4f7f-9110-2f26fd1cb3f7 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1425.735537] env[69992]: INFO nova.scheduler.client.report [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Deleted allocations for instance e95e47c2-d82e-4153-8d16-7b65d992e91a [ 1425.926901] env[69992]: DEBUG nova.compute.manager [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1425.937630] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8234b9d1-a677-4bd0-9925-dbaa18445dfd tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "904b8020-3060-4611-bdd4-650e288d69fd" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.072s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1425.957093] env[69992]: DEBUG nova.virt.hardware [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1425.957340] env[69992]: DEBUG nova.virt.hardware [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1425.957493] env[69992]: DEBUG nova.virt.hardware [None req-f5805411-fe58-4092-b821-d9e38ca9b68a 
tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1425.957672] env[69992]: DEBUG nova.virt.hardware [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1425.958573] env[69992]: DEBUG nova.virt.hardware [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1425.958573] env[69992]: DEBUG nova.virt.hardware [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1425.958573] env[69992]: DEBUG nova.virt.hardware [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1425.958573] env[69992]: DEBUG nova.virt.hardware [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1425.958573] env[69992]: DEBUG nova.virt.hardware [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1425.958908] env[69992]: DEBUG nova.virt.hardware [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1425.958908] env[69992]: DEBUG nova.virt.hardware [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1425.959967] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffdbbabf-1694-4ad1-82a8-83236e41b809 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.969689] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f73e849-e938-471f-afc7-d9363b100f24 
{{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.060015] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 086ac14d-74bb-4bb6-90b3-3e345b2894a9] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1426.231884] env[69992]: DEBUG nova.network.neutron [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Successfully updated port: 2938c230-8b81-4b8d-96c2-6cf38f5d97af {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1426.242809] env[69992]: DEBUG nova.compute.manager [req-6a193479-6ced-41d9-a5b3-96e5c74123a0 req-11c45dda-3faf-4960-bbd7-efd926369ff5 service nova] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Received event network-vif-plugged-2938c230-8b81-4b8d-96c2-6cf38f5d97af {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1426.243090] env[69992]: DEBUG oslo_concurrency.lockutils [req-6a193479-6ced-41d9-a5b3-96e5c74123a0 req-11c45dda-3faf-4960-bbd7-efd926369ff5 service nova] Acquiring lock "c2268475-6506-4c1f-8f8a-7b8d3a5cb28c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1426.243314] env[69992]: DEBUG oslo_concurrency.lockutils [req-6a193479-6ced-41d9-a5b3-96e5c74123a0 req-11c45dda-3faf-4960-bbd7-efd926369ff5 service nova] Lock "c2268475-6506-4c1f-8f8a-7b8d3a5cb28c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1426.243485] env[69992]: DEBUG oslo_concurrency.lockutils [req-6a193479-6ced-41d9-a5b3-96e5c74123a0 req-11c45dda-3faf-4960-bbd7-efd926369ff5 service nova] Lock "c2268475-6506-4c1f-8f8a-7b8d3a5cb28c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1426.243650] env[69992]: DEBUG nova.compute.manager [req-6a193479-6ced-41d9-a5b3-96e5c74123a0 req-11c45dda-3faf-4960-bbd7-efd926369ff5 service nova] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] No waiting events found dispatching network-vif-plugged-2938c230-8b81-4b8d-96c2-6cf38f5d97af {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1426.243814] env[69992]: WARNING nova.compute.manager [req-6a193479-6ced-41d9-a5b3-96e5c74123a0 req-11c45dda-3faf-4960-bbd7-efd926369ff5 service nova] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Received unexpected event network-vif-plugged-2938c230-8b81-4b8d-96c2-6cf38f5d97af for instance with vm_state building and task_state spawning. 
[ 1426.247596] env[69992]: DEBUG oslo_concurrency.lockutils [None req-58837f15-f923-447b-9f73-1ecf07efabba tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "e95e47c2-d82e-4153-8d16-7b65d992e91a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 8.195s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1426.372572] env[69992]: INFO nova.compute.manager [None req-59fa7d98-06d3-437f-9f34-2e150220a27d tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Resuming [ 1426.373233] env[69992]: DEBUG nova.objects.instance [None req-59fa7d98-06d3-437f-9f34-2e150220a27d tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lazy-loading 'flavor' on Instance uuid 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1426.375802] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-323688f5-e4a6-420c-a8ef-09d9cf989813 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.385148] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0bd80b-9c88-4405-85d0-2692c695acb3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.414660] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5f2cd59-380a-4f64-b426-4273d2b38d49 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.422131] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd71094-8301-40d6-8cda-27b0c37340d1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.437018] env[69992]: DEBUG nova.compute.provider_tree [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1426.499858] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "refresh_cache-d50d7460-2b70-45bc-940f-7d45f329fa1c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1426.500068] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired lock "refresh_cache-d50d7460-2b70-45bc-940f-7d45f329fa1c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1426.500253] env[69992]: DEBUG nova.network.neutron [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: 
d50d7460-2b70-45bc-940f-7d45f329fa1c] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1426.565115] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 131096fc-addf-4d9a-9cd7-4abe98aabd1f] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1426.592236] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "904b8020-3060-4611-bdd4-650e288d69fd" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1426.592499] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "904b8020-3060-4611-bdd4-650e288d69fd" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1426.592706] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "904b8020-3060-4611-bdd4-650e288d69fd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1426.592891] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "904b8020-3060-4611-bdd4-650e288d69fd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1426.593119] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "904b8020-3060-4611-bdd4-650e288d69fd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1426.595522] env[69992]: INFO nova.compute.manager [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Terminating instance [ 1426.735422] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "refresh_cache-c2268475-6506-4c1f-8f8a-7b8d3a5cb28c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1426.735583] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 
tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquired lock "refresh_cache-c2268475-6506-4c1f-8f8a-7b8d3a5cb28c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1426.735750] env[69992]: DEBUG nova.network.neutron [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1426.942150] env[69992]: DEBUG nova.scheduler.client.report [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1427.068772] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 7932a42f-6a62-4c2c-be9a-3cb518fe4183] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1427.099480] env[69992]: DEBUG nova.compute.manager [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1427.099578] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1427.100661] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcbc6a6c-b1dd-494b-8b14-e10e62e569f1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.109076] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1427.109214] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e7fa4001-22d1-42ae-8407-e20a96600411 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.178162] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1427.178392] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1427.178575] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Deleting the datastore file [datastore2] 904b8020-3060-4611-bdd4-650e288d69fd {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1427.178841] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d708fc4c-2812-4b95-94a8-e8a9811199b8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.188843] env[69992]: DEBUG oslo_vmware.api [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1427.188843] env[69992]: value = "task-2898105" [ 1427.188843] env[69992]: _type = "Task" [ 1427.188843] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.198097] env[69992]: DEBUG oslo_vmware.api [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898105, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.228289] env[69992]: DEBUG nova.network.neutron [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Updating instance_info_cache with network_info: [{"id": "0b3a8c02-b431-4538-b679-fba08b7e9e8e", "address": "fa:16:3e:a1:2b:47", "network": {"id": "ef950ccf-7246-4fba-b1d2-5829e51d7f7d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093076994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b8716c4b7324052a3472734c655655a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b3a8c02-b4", "ovs_interfaceid": "0b3a8c02-b431-4538-b679-fba08b7e9e8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1427.273056] env[69992]: DEBUG nova.network.neutron [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1427.420388] env[69992]: DEBUG nova.network.neutron [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Updating instance_info_cache with network_info: [{"id": "2938c230-8b81-4b8d-96c2-6cf38f5d97af", "address": "fa:16:3e:d5:44:75", "network": {"id": "838abbcd-8525-47f9-b3e0-eb738a0cea7e", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1428842137-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53308426a9c44f46b78a155e612ee5a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2938c230-8b", "ovs_interfaceid": "2938c230-8b81-4b8d-96c2-6cf38f5d97af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1427.447483] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.729s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1427.449674] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.805s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1427.449905] env[69992]: DEBUG nova.objects.instance [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lazy-loading 'resources' on Instance uuid 6ccc70f5-4857-4af3-99a1-f60ec35aebaf {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1427.480845] env[69992]: INFO nova.scheduler.client.report [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Deleted allocations for instance 08869f38-9609-4f7f-9110-2f26fd1cb3f7 [ 1427.572242] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 4cd9fb91-44f1-4304-a2bf-c8b294b19e0e] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1427.697885] env[69992]: DEBUG oslo_vmware.api [None 
req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898105, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140509} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.698159] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1427.698348] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1427.698522] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1427.698694] env[69992]: INFO nova.compute.manager [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1427.698936] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1427.699146] env[69992]: DEBUG nova.compute.manager [-] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1427.699241] env[69992]: DEBUG nova.network.neutron [-] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1427.733394] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Releasing lock "refresh_cache-d50d7460-2b70-45bc-940f-7d45f329fa1c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1427.891529] env[69992]: DEBUG oslo_concurrency.lockutils [None req-59fa7d98-06d3-437f-9f34-2e150220a27d tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "refresh_cache-25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1427.891725] env[69992]: DEBUG oslo_concurrency.lockutils [None req-59fa7d98-06d3-437f-9f34-2e150220a27d tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquired lock "refresh_cache-25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1427.891947] env[69992]: DEBUG nova.network.neutron [None req-59fa7d98-06d3-437f-9f34-2e150220a27d tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1427.923107] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Releasing lock "refresh_cache-c2268475-6506-4c1f-8f8a-7b8d3a5cb28c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1427.923461] env[69992]: DEBUG nova.compute.manager [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Instance network_info: |[{"id": "2938c230-8b81-4b8d-96c2-6cf38f5d97af", "address": "fa:16:3e:d5:44:75", "network": {"id": "838abbcd-8525-47f9-b3e0-eb738a0cea7e", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1428842137-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53308426a9c44f46b78a155e612ee5a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2938c230-8b", "ovs_interfaceid": "2938c230-8b81-4b8d-96c2-6cf38f5d97af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1427.923864] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:44:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2938c230-8b81-4b8d-96c2-6cf38f5d97af', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1427.931420] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1427.933058] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1427.933455] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4a77a9e2-9bdd-4173-b61e-23b3cb4e02b6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.955856] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1427.955856] env[69992]: value = "task-2898106" [ 1427.955856] env[69992]: _type = "Task" [ 1427.955856] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.963511] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898106, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.991024] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b8423afe-2ee0-45b7-a5d0-cdf0564d191f tempest-AttachVolumeShelveTestJSON-419349134 tempest-AttachVolumeShelveTestJSON-419349134-project-member] Lock "08869f38-9609-4f7f-9110-2f26fd1cb3f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 10.020s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1428.075245] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 1b4da2ab-d026-45d8-8234-79ddd84d5cbb] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1428.092927] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54fa7995-4d73-44e0-b737-a64036f6c93a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.100839] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-360893dc-4c87-4b65-a975-2ed269519585 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.132456] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-accd07ce-d6ef-4b0c-b6a1-23008fb9bc82 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.140677] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d87038b8-b4c9-4fef-b173-884f8511bb84 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.155336] env[69992]: DEBUG nova.compute.provider_tree [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1428.259568] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-542d0cce-ff98-4fda-8f0e-0bcd6b6ab9c3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.267095] env[69992]: DEBUG nova.compute.manager [req-31db5788-0105-4b2a-9f7c-ec33b45f7d18 req-0b0b15bc-bd4a-4e68-bb28-d5f1c86a94ea service nova] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Received event network-changed-2938c230-8b81-4b8d-96c2-6cf38f5d97af {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1428.267431] env[69992]: DEBUG nova.compute.manager [req-31db5788-0105-4b2a-9f7c-ec33b45f7d18 req-0b0b15bc-bd4a-4e68-bb28-d5f1c86a94ea service nova] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Refreshing instance network info cache due to event network-changed-2938c230-8b81-4b8d-96c2-6cf38f5d97af. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1428.267644] env[69992]: DEBUG oslo_concurrency.lockutils [req-31db5788-0105-4b2a-9f7c-ec33b45f7d18 req-0b0b15bc-bd4a-4e68-bb28-d5f1c86a94ea service nova] Acquiring lock "refresh_cache-c2268475-6506-4c1f-8f8a-7b8d3a5cb28c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1428.267794] env[69992]: DEBUG oslo_concurrency.lockutils [req-31db5788-0105-4b2a-9f7c-ec33b45f7d18 req-0b0b15bc-bd4a-4e68-bb28-d5f1c86a94ea service nova] Acquired lock "refresh_cache-c2268475-6506-4c1f-8f8a-7b8d3a5cb28c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1428.267970] env[69992]: DEBUG nova.network.neutron [req-31db5788-0105-4b2a-9f7c-ec33b45f7d18 req-0b0b15bc-bd4a-4e68-bb28-d5f1c86a94ea service nova] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Refreshing network info cache for port 2938c230-8b81-4b8d-96c2-6cf38f5d97af {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1428.286718] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07505385-6e69-47c2-ae77-1862956e96b1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.294521] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Updating instance 'd50d7460-2b70-45bc-940f-7d45f329fa1c' progress to 83 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1428.436602] env[69992]: DEBUG nova.network.neutron [-] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1428.466186] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898106, 'name': CreateVM_Task, 'duration_secs': 0.300614} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.466186] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1428.466730] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1428.467383] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1428.467710] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1428.469356] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-310460f5-9759-4d55-a735-585f432ab1cd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.474116] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "5df7d031-66bf-43eb-a05b-07b6cff9db59" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1428.474349] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "5df7d031-66bf-43eb-a05b-07b6cff9db59" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1428.479185] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1428.479185] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5299a1cb-3e51-3eb0-6f46-7c7e1539edd3" [ 1428.479185] env[69992]: _type = "Task" [ 1428.479185] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.491083] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5299a1cb-3e51-3eb0-6f46-7c7e1539edd3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.580282] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 673be00f-e3c5-4a54-beeb-cf89828e9e32] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1428.611957] env[69992]: DEBUG nova.network.neutron [None req-59fa7d98-06d3-437f-9f34-2e150220a27d tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Updating instance_info_cache with network_info: [{"id": "0042c1e4-d906-4261-a18e-ce232533cbdd", "address": "fa:16:3e:44:45:52", "network": {"id": "58824cf0-bce0-4f1b-9942-dd68624dd3ff", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1287894269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1471cdd6671b4e6ebc23b8fc2b120b63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6fab536-1e48-4d07-992a-076f0e6d089c", "external-id": "nsx-vlan-transportzone-61", "segmentation_id": 61, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0042c1e4-d9", "ovs_interfaceid": "0042c1e4-d906-4261-a18e-ce232533cbdd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1428.659513] env[69992]: DEBUG nova.scheduler.client.report [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1428.800787] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1428.801133] 
env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e50abcb4-6eb1-43fe-9549-5d024c69904b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.809469] env[69992]: DEBUG oslo_vmware.api [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1428.809469] env[69992]: value = "task-2898107" [ 1428.809469] env[69992]: _type = "Task" [ 1428.809469] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.819412] env[69992]: DEBUG oslo_vmware.api [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898107, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.939587] env[69992]: INFO nova.compute.manager [-] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Took 1.24 seconds to deallocate network for instance. [ 1428.976978] env[69992]: DEBUG nova.compute.manager [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1428.991359] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5299a1cb-3e51-3eb0-6f46-7c7e1539edd3, 'name': SearchDatastore_Task, 'duration_secs': 0.010394} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.991359] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1428.991359] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1428.991359] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1428.991664] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1428.991756] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1428.992607] env[69992]: DEBUG nova.network.neutron [req-31db5788-0105-4b2a-9f7c-ec33b45f7d18 req-0b0b15bc-bd4a-4e68-bb28-d5f1c86a94ea service nova] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Updated VIF entry in instance network info cache for port 2938c230-8b81-4b8d-96c2-6cf38f5d97af. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1428.993142] env[69992]: DEBUG nova.network.neutron [req-31db5788-0105-4b2a-9f7c-ec33b45f7d18 req-0b0b15bc-bd4a-4e68-bb28-d5f1c86a94ea service nova] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Updating instance_info_cache with network_info: [{"id": "2938c230-8b81-4b8d-96c2-6cf38f5d97af", "address": "fa:16:3e:d5:44:75", "network": {"id": "838abbcd-8525-47f9-b3e0-eb738a0cea7e", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1428842137-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53308426a9c44f46b78a155e612ee5a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2938c230-8b", "ovs_interfaceid": "2938c230-8b81-4b8d-96c2-6cf38f5d97af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1428.994731] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca73ac8b-8d2b-44fd-a542-688ecae9cff8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.006860] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1429.007062] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1429.007784] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89f21146-19e0-46e5-b372-7aa29c2f513d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.012921] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1429.012921] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52227994-93ef-c567-5f77-3b28e1bdfa51" [ 1429.012921] env[69992]: _type = "Task" [ 1429.012921] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.020442] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52227994-93ef-c567-5f77-3b28e1bdfa51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.084438] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 0e8163d9-6ff5-4f1e-af33-ccb42fa46750] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1429.114307] env[69992]: DEBUG oslo_concurrency.lockutils [None req-59fa7d98-06d3-437f-9f34-2e150220a27d tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Releasing lock "refresh_cache-25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1429.115466] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805fa09f-4679-4742-8c70-4ddfe8f4c9ab {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.122704] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-59fa7d98-06d3-437f-9f34-2e150220a27d tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Resuming the VM {{(pid=69992) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1429.122984] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27235565-d7a6-4f67-9cea-d965af889cbc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.129276] env[69992]: DEBUG oslo_vmware.api [None req-59fa7d98-06d3-437f-9f34-2e150220a27d tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1429.129276] env[69992]: value = "task-2898108" [ 1429.129276] env[69992]: _type = "Task" [ 1429.129276] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.137759] env[69992]: DEBUG oslo_vmware.api [None req-59fa7d98-06d3-437f-9f34-2e150220a27d tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898108, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.165448] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.716s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1429.168571] env[69992]: DEBUG oslo_concurrency.lockutils [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.666s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1429.170584] env[69992]: INFO nova.compute.claims [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1429.192050] env[69992]: INFO nova.scheduler.client.report [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Deleted allocations for instance 6ccc70f5-4857-4af3-99a1-f60ec35aebaf [ 1429.321731] env[69992]: DEBUG oslo_vmware.api [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898107, 'name': PowerOnVM_Task, 'duration_secs': 0.389112} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.322066] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1429.322319] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f95a1dff-c865-419b-9346-d08d97aa8ae5 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Updating instance 'd50d7460-2b70-45bc-940f-7d45f329fa1c' progress to 100 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1429.446470] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1429.498238] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1429.499887] env[69992]: DEBUG oslo_concurrency.lockutils [req-31db5788-0105-4b2a-9f7c-ec33b45f7d18 req-0b0b15bc-bd4a-4e68-bb28-d5f1c86a94ea service nova] Releasing lock "refresh_cache-c2268475-6506-4c1f-8f8a-7b8d3a5cb28c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1429.500183] env[69992]: DEBUG nova.compute.manager [req-31db5788-0105-4b2a-9f7c-ec33b45f7d18 req-0b0b15bc-bd4a-4e68-bb28-d5f1c86a94ea service nova] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Received event network-vif-deleted-e6aa5742-9e1a-4fa9-b7e1-abc39d8f9c65 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1429.500423] env[69992]: INFO nova.compute.manager [req-31db5788-0105-4b2a-9f7c-ec33b45f7d18 req-0b0b15bc-bd4a-4e68-bb28-d5f1c86a94ea service nova] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Neutron deleted interface e6aa5742-9e1a-4fa9-b7e1-abc39d8f9c65; detaching it from the instance and deleting it from the info cache [ 1429.500630] env[69992]: DEBUG nova.network.neutron [req-31db5788-0105-4b2a-9f7c-ec33b45f7d18 req-0b0b15bc-bd4a-4e68-bb28-d5f1c86a94ea service nova] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.523283] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52227994-93ef-c567-5f77-3b28e1bdfa51, 'name': SearchDatastore_Task, 'duration_secs': 0.007845} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.524084] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22f4faba-790d-4442-9ff3-62d2e5c857a0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.529484] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1429.529484] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52452aed-7e12-4e73-982c-bc871c3b3161" [ 1429.529484] env[69992]: _type = "Task" [ 1429.529484] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.537470] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52452aed-7e12-4e73-982c-bc871c3b3161, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.588018] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: a06d4b38-0e39-46ef-a588-7627661cb201] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1429.638749] env[69992]: DEBUG oslo_vmware.api [None req-59fa7d98-06d3-437f-9f34-2e150220a27d tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898108, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.701064] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f44ed15c-2fee-4413-a918-e3e53304c26e tempest-AttachInterfacesTestJSON-1431277975 tempest-AttachInterfacesTestJSON-1431277975-project-member] Lock "6ccc70f5-4857-4af3-99a1-f60ec35aebaf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.840s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1430.003541] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3231fe02-7953-4077-894c-a00d21e9233a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.016885] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efee0d04-26ab-41f6-bd07-795ec9ac50fd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.044257] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52452aed-7e12-4e73-982c-bc871c3b3161, 'name': SearchDatastore_Task, 'duration_secs': 0.009738} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.044539] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1430.044802] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] c2268475-6506-4c1f-8f8a-7b8d3a5cb28c/c2268475-6506-4c1f-8f8a-7b8d3a5cb28c.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1430.045718] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6cceeeae-c63f-4ff5-b3ee-fb3b5a649116 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.058982] env[69992]: DEBUG nova.compute.manager [req-31db5788-0105-4b2a-9f7c-ec33b45f7d18 req-0b0b15bc-bd4a-4e68-bb28-d5f1c86a94ea service nova] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Detach interface failed, port_id=e6aa5742-9e1a-4fa9-b7e1-abc39d8f9c65, reason: Instance 904b8020-3060-4611-bdd4-650e288d69fd could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1430.061170] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1430.061170] env[69992]: value = "task-2898109" [ 1430.061170] env[69992]: _type = "Task" [ 1430.061170] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.074984] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898109, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.092173] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 7fc7c481-75e8-40f2-a971-752ce6dde59b] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1430.142580] env[69992]: DEBUG oslo_vmware.api [None req-59fa7d98-06d3-437f-9f34-2e150220a27d tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898108, 'name': PowerOnVM_Task, 'duration_secs': 0.752131} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.142929] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-59fa7d98-06d3-437f-9f34-2e150220a27d tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Resumed the VM {{(pid=69992) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1430.143172] env[69992]: DEBUG nova.compute.manager [None req-59fa7d98-06d3-437f-9f34-2e150220a27d tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1430.143939] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9392e0c6-7938-48ed-9ead-9d62184ea8f1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.349048] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62cd75c1-a8e2-4c1b-b3cd-0a4775ad1307 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.357478] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd696b9-3a10-41af-af54-e11bad18dc80 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.394874] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afedacd7-d0c5-46de-b237-f7135e35de92 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.404603] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc916b72-8e67-43dd-94f6-aedd21a7db65 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.420233] env[69992]: DEBUG nova.compute.provider_tree [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1430.570491] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898109, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.498299} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.570738] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] c2268475-6506-4c1f-8f8a-7b8d3a5cb28c/c2268475-6506-4c1f-8f8a-7b8d3a5cb28c.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1430.571063] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1430.571352] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0bb07a56-6b3f-4137-a683-f86c8f2c90bb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.577936] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1430.577936] env[69992]: value = "task-2898111" [ 1430.577936] env[69992]: _type = "Task" [ 1430.577936] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.585704] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898111, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.598332] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: af07ebd0-5f12-49c3-a518-95be9a8d6c82] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1430.925028] env[69992]: DEBUG nova.scheduler.client.report [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1431.087325] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898111, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.287093} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.087602] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1431.088390] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-758d8e50-3c7d-445b-a390-8c2e8c5d8ede {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.110450] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] c2268475-6506-4c1f-8f8a-7b8d3a5cb28c/c2268475-6506-4c1f-8f8a-7b8d3a5cb28c.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1431.110913] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: b7a1b9e1-4d57-435f-bdb6-51481968aacb] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1431.113330] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a38bf7dc-e84f-4d84-9eea-d0097d8ea79e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.134046] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1431.134046] env[69992]: value = "task-2898112" [ 1431.134046] env[69992]: _type = "Task" [ 1431.134046] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.143551] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898112, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.364429] env[69992]: DEBUG oslo_concurrency.lockutils [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "d50d7460-2b70-45bc-940f-7d45f329fa1c" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1431.364820] env[69992]: DEBUG oslo_concurrency.lockutils [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "d50d7460-2b70-45bc-940f-7d45f329fa1c" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1431.364967] env[69992]: DEBUG nova.compute.manager [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Going to confirm migration 7 {{(pid=69992) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1431.430414] env[69992]: DEBUG oslo_concurrency.lockutils [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.262s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1431.431249] env[69992]: DEBUG nova.compute.manager [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1431.435302] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.989s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1431.435562] env[69992]: DEBUG nova.objects.instance [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lazy-loading 'resources' on Instance uuid 904b8020-3060-4611-bdd4-650e288d69fd {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1431.613953] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 30eada4d-fdc1-4e54-99d2-d35bbb3fcbe6] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1431.643499] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898112, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.937064] env[69992]: DEBUG nova.compute.utils [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1431.938553] env[69992]: DEBUG nova.compute.manager [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1431.938739] env[69992]: DEBUG nova.network.neutron [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1431.954147] env[69992]: DEBUG oslo_concurrency.lockutils [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "refresh_cache-d50d7460-2b70-45bc-940f-7d45f329fa1c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.954375] env[69992]: DEBUG oslo_concurrency.lockutils [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquired lock "refresh_cache-d50d7460-2b70-45bc-940f-7d45f329fa1c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1431.954584] env[69992]: DEBUG nova.network.neutron [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1431.954802] env[69992]: DEBUG nova.objects.instance [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lazy-loading 'info_cache' on Instance uuid d50d7460-2b70-45bc-940f-7d45f329fa1c {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1432.010727] env[69992]: DEBUG nova.policy [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4961e3352fc94012a5ad457736da538c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d5c32fe8b254c5abdd4123bd2088353', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1432.117585] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: fc04a536-3a6f-4d3a-b0f1-68e9a1cee5e2] Instance has had 0 of 5 cleanup attempts {{(pid=69992) 
_run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1432.146261] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898112, 'name': ReconfigVM_Task, 'duration_secs': 0.52578} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.146899] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Reconfigured VM instance instance-00000071 to attach disk [datastore1] c2268475-6506-4c1f-8f8a-7b8d3a5cb28c/c2268475-6506-4c1f-8f8a-7b8d3a5cb28c.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1432.147880] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-10cde9c8-f6e7-4c79-89f2-111aa65caeb0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.161173] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1432.161173] env[69992]: value = "task-2898113" [ 1432.161173] env[69992]: _type = "Task" [ 1432.161173] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.168718] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a9b7cb-3b53-4ce7-b50f-640dd82bcc65 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.175407] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898113, 'name': Rename_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.180774] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-031e7622-f77c-426d-a48d-d79f43532d92 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.213873] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46874c1-aa9c-4182-8ca9-033198d52247 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.221992] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f951cb0d-4f3b-466a-ad1e-5d9894c030d2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.235959] env[69992]: DEBUG nova.compute.provider_tree [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1432.344286] env[69992]: DEBUG nova.network.neutron [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Successfully created port: 5e4cacd9-f025-4a13-8d5b-615b3c9e12a3 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1432.445571] env[69992]: DEBUG nova.compute.manager [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1432.624113] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: a7f01cd7-f148-48fc-a71a-5461672d6039] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1432.624113] env[69992]: DEBUG nova.network.neutron [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Successfully created port: 3b4a3277-a4e1-4872-87b4-f4fcaadff6bc {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1432.671199] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898113, 'name': Rename_Task, 'duration_secs': 0.13681} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.671199] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1432.671199] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b107122b-cabf-4dc4-ae36-243825fd793f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.678496] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1432.678496] env[69992]: value = "task-2898114" [ 1432.678496] env[69992]: _type = "Task" [ 1432.678496] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.686250] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898114, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.740146] env[69992]: DEBUG nova.scheduler.client.report [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1433.131679] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: bcb5131c-b2c6-4971-8a2e-4fcd7133442d] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1433.172271] env[69992]: DEBUG nova.network.neutron [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Updating instance_info_cache with network_info: [{"id": "0b3a8c02-b431-4538-b679-fba08b7e9e8e", "address": "fa:16:3e:a1:2b:47", "network": {"id": "ef950ccf-7246-4fba-b1d2-5829e51d7f7d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1093076994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"5b8716c4b7324052a3472734c655655a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ed4797-90ad-44cd-bbcb-e90b2a8400f3", "external-id": "nsx-vlan-transportzone-699", "segmentation_id": 699, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b3a8c02-b4", "ovs_interfaceid": "0b3a8c02-b431-4538-b679-fba08b7e9e8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1433.191224] env[69992]: DEBUG oslo_vmware.api [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898114, 'name': PowerOnVM_Task, 'duration_secs': 0.461254} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.191555] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1433.191797] env[69992]: INFO nova.compute.manager [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Took 7.26 seconds to spawn the instance on the hypervisor. 
[ 1433.192015] env[69992]: DEBUG nova.compute.manager [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1433.192903] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cfbdcef-a59f-4600-8dd1-95fe6c4fcfba {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.246707] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.811s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1433.250978] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.753s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1433.253492] env[69992]: INFO nova.compute.claims [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1433.283189] env[69992]: INFO nova.scheduler.client.report [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Deleted allocations for instance 904b8020-3060-4611-bdd4-650e288d69fd [ 1433.454244] env[69992]: DEBUG nova.compute.manager [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1433.487013] env[69992]: DEBUG nova.virt.hardware [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1433.487234] env[69992]: DEBUG nova.virt.hardware [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1433.487377] env[69992]: DEBUG nova.virt.hardware [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1433.487527] env[69992]: DEBUG nova.virt.hardware [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1433.487672] env[69992]: DEBUG nova.virt.hardware [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1433.487817] env[69992]: DEBUG nova.virt.hardware [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1433.488096] env[69992]: DEBUG nova.virt.hardware [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1433.488223] env[69992]: DEBUG nova.virt.hardware [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1433.488450] env[69992]: DEBUG nova.virt.hardware [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 
tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1433.488553] env[69992]: DEBUG nova.virt.hardware [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1433.488726] env[69992]: DEBUG nova.virt.hardware [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1433.489629] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea8e4a6d-27da-4bae-a215-22f2fe1998e6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.497834] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bbd0e80-d44e-4c16-b938-b028fa44f98c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.636700] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: a8813822-f77b-4b73-a6dc-e0eab83b0402] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1433.678575] env[69992]: DEBUG oslo_concurrency.lockutils [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Releasing lock "refresh_cache-d50d7460-2b70-45bc-940f-7d45f329fa1c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1433.678838] env[69992]: DEBUG nova.objects.instance [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lazy-loading 'migration_context' on Instance uuid d50d7460-2b70-45bc-940f-7d45f329fa1c {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1433.717144] env[69992]: INFO nova.compute.manager [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Took 16.81 seconds to build instance. 
[ 1433.795303] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d1b46026-2bde-4908-bedf-a49cb0e0ac40 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "904b8020-3060-4611-bdd4-650e288d69fd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.201s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1433.940348] env[69992]: DEBUG oslo_concurrency.lockutils [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Acquiring lock "7c8b830a-e89c-4d97-a987-141797aaa55f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1433.940577] env[69992]: DEBUG oslo_concurrency.lockutils [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Lock "7c8b830a-e89c-4d97-a987-141797aaa55f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1434.048459] env[69992]: DEBUG nova.compute.manager [req-6f2c1026-f003-467a-92ef-fe54f2696a43 req-b1eac0c3-5f85-42a9-b196-f7d494c41485 service nova] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Received event network-vif-plugged-5e4cacd9-f025-4a13-8d5b-615b3c9e12a3 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1434.048619] env[69992]: DEBUG oslo_concurrency.lockutils [req-6f2c1026-f003-467a-92ef-fe54f2696a43 req-b1eac0c3-5f85-42a9-b196-f7d494c41485 service nova] Acquiring lock "9d290fe7-12d2-416e-9608-7a8e7e9b2f65-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1434.048825] env[69992]: DEBUG oslo_concurrency.lockutils [req-6f2c1026-f003-467a-92ef-fe54f2696a43 req-b1eac0c3-5f85-42a9-b196-f7d494c41485 service nova] Lock "9d290fe7-12d2-416e-9608-7a8e7e9b2f65-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1434.048995] env[69992]: DEBUG oslo_concurrency.lockutils [req-6f2c1026-f003-467a-92ef-fe54f2696a43 req-b1eac0c3-5f85-42a9-b196-f7d494c41485 service nova] Lock "9d290fe7-12d2-416e-9608-7a8e7e9b2f65-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1434.049391] env[69992]: DEBUG nova.compute.manager [req-6f2c1026-f003-467a-92ef-fe54f2696a43 req-b1eac0c3-5f85-42a9-b196-f7d494c41485 service nova] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] No waiting events found dispatching network-vif-plugged-5e4cacd9-f025-4a13-8d5b-615b3c9e12a3 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1434.049577] env[69992]: WARNING nova.compute.manager [req-6f2c1026-f003-467a-92ef-fe54f2696a43 req-b1eac0c3-5f85-42a9-b196-f7d494c41485 service nova] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] 
Received unexpected event network-vif-plugged-5e4cacd9-f025-4a13-8d5b-615b3c9e12a3 for instance with vm_state building and task_state spawning. [ 1434.095101] env[69992]: DEBUG nova.network.neutron [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Successfully updated port: 5e4cacd9-f025-4a13-8d5b-615b3c9e12a3 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1434.139589] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: dd31269e-716c-44cd-9fc3-ce227fe5b3b2] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1434.181616] env[69992]: DEBUG nova.objects.base [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1434.182552] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea663e77-e79b-45e1-92d9-51696824f95a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.209505] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd62b48e-c33c-4a06-b719-a074d9d67b7f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.216635] env[69992]: DEBUG oslo_vmware.api [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1434.216635] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a37995-e107-2c66-6bd0-a004082864e2" [ 1434.216635] env[69992]: _type = "Task" [ 1434.216635] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.220262] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5805411-fe58-4092-b821-d9e38ca9b68a tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "c2268475-6506-4c1f-8f8a-7b8d3a5cb28c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.321s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1434.225017] env[69992]: DEBUG oslo_vmware.api [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a37995-e107-2c66-6bd0-a004082864e2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.288798] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "c2268475-6506-4c1f-8f8a-7b8d3a5cb28c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1434.288798] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "c2268475-6506-4c1f-8f8a-7b8d3a5cb28c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1434.288798] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "c2268475-6506-4c1f-8f8a-7b8d3a5cb28c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1434.288798] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "c2268475-6506-4c1f-8f8a-7b8d3a5cb28c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1434.288798] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "c2268475-6506-4c1f-8f8a-7b8d3a5cb28c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1434.295518] env[69992]: INFO nova.compute.manager [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Terminating instance [ 1434.443088] env[69992]: DEBUG nova.compute.manager [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1434.473179] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2488cd4-4beb-4601-90f6-d2b8cf90d7d4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.481785] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880ed6b2-7e47-43bf-a3bc-4d5531ffc567 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.515382] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a380d6d3-27f8-4b2e-87db-e33083bbc7f9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.523463] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee6797c-07b3-4806-95ae-bd75418f1a43 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.537591] env[69992]: DEBUG nova.compute.provider_tree [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1434.642894] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: eec50935-f553-43c7-b67b-7289299745bd] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1434.728950] env[69992]: DEBUG oslo_vmware.api [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a37995-e107-2c66-6bd0-a004082864e2, 'name': SearchDatastore_Task, 'duration_secs': 0.009669} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.729300] env[69992]: DEBUG oslo_concurrency.lockutils [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1434.804980] env[69992]: DEBUG nova.compute.manager [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1434.806465] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1434.806597] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-438be6dc-5fe8-4b86-9d58-b2ac9db752d3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.816302] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1434.816302] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1bdf3b3c-1df4-4287-ac52-0b0e4c6c0952 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.822990] env[69992]: DEBUG oslo_vmware.api [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1434.822990] env[69992]: value = "task-2898115" [ 1434.822990] env[69992]: _type = "Task" [ 1434.822990] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.834849] env[69992]: DEBUG oslo_vmware.api [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898115, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.978641] env[69992]: DEBUG oslo_concurrency.lockutils [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1435.043275] env[69992]: DEBUG nova.scheduler.client.report [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1435.148140] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: a49b4721-e338-4e60-b91e-137caa3c9c03] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1435.218512] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "fcbe1142-72dc-4a02-af9b-e03a2031a247" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1435.218702] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "fcbe1142-72dc-4a02-af9b-e03a2031a247" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1435.218914] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "fcbe1142-72dc-4a02-af9b-e03a2031a247-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1435.219103] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "fcbe1142-72dc-4a02-af9b-e03a2031a247-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1435.219271] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock 
"fcbe1142-72dc-4a02-af9b-e03a2031a247-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1435.221507] env[69992]: INFO nova.compute.manager [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Terminating instance [ 1435.332519] env[69992]: DEBUG oslo_vmware.api [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898115, 'name': PowerOffVM_Task, 'duration_secs': 0.263266} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.332849] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1435.333017] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1435.333371] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a501ad71-6645-439a-b301-23b3f46205af {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.416877] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1435.417215] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1435.417428] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Deleting the datastore file [datastore1] c2268475-6506-4c1f-8f8a-7b8d3a5cb28c {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1435.417799] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-87821251-2a16-451c-b87b-bc81346f0940 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.423816] env[69992]: DEBUG oslo_vmware.api [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 
tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for the task: (returnval){ [ 1435.423816] env[69992]: value = "task-2898117" [ 1435.423816] env[69992]: _type = "Task" [ 1435.423816] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.431480] env[69992]: DEBUG oslo_vmware.api [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898117, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.546683] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.296s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1435.547224] env[69992]: DEBUG nova.compute.manager [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1435.549957] env[69992]: DEBUG oslo_concurrency.lockutils [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.821s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1435.652101] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 27492ef7-8258-4001-b3b3-5bcb94e12c1f] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1435.725987] env[69992]: DEBUG nova.compute.manager [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1435.726350] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1435.728205] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7097d784-bc0b-411d-9266-f4d61f22db95 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.738615] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1435.738936] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-70c9dab2-885c-4a01-a736-39c13ec3b119 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.745862] env[69992]: DEBUG oslo_vmware.api [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1435.745862] env[69992]: value = "task-2898118" [ 1435.745862] env[69992]: _type = "Task" [ 1435.745862] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.754730] env[69992]: DEBUG oslo_vmware.api [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898118, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.934158] env[69992]: DEBUG oslo_vmware.api [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898117, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.053502] env[69992]: DEBUG nova.compute.utils [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1436.058020] env[69992]: DEBUG nova.compute.manager [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1436.058211] env[69992]: DEBUG nova.network.neutron [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1436.083909] env[69992]: DEBUG nova.compute.manager [req-09ab17ce-0728-4ea6-8bab-f10e36dedfb6 req-5e6602c6-f727-4cf5-85b7-37c9dfb68038 service nova] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Received event network-changed-5e4cacd9-f025-4a13-8d5b-615b3c9e12a3 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1436.084190] env[69992]: DEBUG nova.compute.manager [req-09ab17ce-0728-4ea6-8bab-f10e36dedfb6 req-5e6602c6-f727-4cf5-85b7-37c9dfb68038 service nova] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Refreshing instance network info cache due to event network-changed-5e4cacd9-f025-4a13-8d5b-615b3c9e12a3. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1436.085709] env[69992]: DEBUG oslo_concurrency.lockutils [req-09ab17ce-0728-4ea6-8bab-f10e36dedfb6 req-5e6602c6-f727-4cf5-85b7-37c9dfb68038 service nova] Acquiring lock "refresh_cache-9d290fe7-12d2-416e-9608-7a8e7e9b2f65" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1436.085709] env[69992]: DEBUG oslo_concurrency.lockutils [req-09ab17ce-0728-4ea6-8bab-f10e36dedfb6 req-5e6602c6-f727-4cf5-85b7-37c9dfb68038 service nova] Acquired lock "refresh_cache-9d290fe7-12d2-416e-9608-7a8e7e9b2f65" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1436.085709] env[69992]: DEBUG nova.network.neutron [req-09ab17ce-0728-4ea6-8bab-f10e36dedfb6 req-5e6602c6-f727-4cf5-85b7-37c9dfb68038 service nova] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Refreshing network info cache for port 5e4cacd9-f025-4a13-8d5b-615b3c9e12a3 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1436.129663] env[69992]: DEBUG nova.policy [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94f19c179a3545089bcc66b7e5dc36e5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4da04b8933ad4d2ba4b1c193853f31b2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1436.156296] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 1d436762-964d-40d9-871e-ee33c3ba25b5] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1436.239442] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4308b94-d127-49bc-a27a-f5e1880a4227 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.251916] env[69992]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43b6fe8-c9b7-44ef-851f-ba4e7d5c75cc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.260996] env[69992]: DEBUG oslo_vmware.api [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898118, 'name': PowerOffVM_Task, 'duration_secs': 0.306143} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.290101] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1436.290626] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1436.291650] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5f22fea7-1f0e-4ca9-8ba7-45b76a14b357 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.294237] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-303cb897-ae2e-4355-b359-4a6e14dc1032 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.305406] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c7ddbcd-6d8e-4035-8a36-8abea4a9ab13 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.311720] env[69992]: DEBUG nova.network.neutron [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Successfully updated port: 3b4a3277-a4e1-4872-87b4-f4fcaadff6bc {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1436.322690] env[69992]: DEBUG nova.compute.provider_tree [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1436.356707] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1436.356937] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Deleting contents of the VM from datastore datastore1 {{(pid=69992) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1436.357163] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Deleting the datastore file [datastore1] fcbe1142-72dc-4a02-af9b-e03a2031a247 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1436.357453] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-42c30a67-4f71-4ef2-8c74-359366058218 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.365338] env[69992]: DEBUG oslo_vmware.api [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for the task: (returnval){ [ 1436.365338] env[69992]: value = "task-2898120" [ 1436.365338] env[69992]: _type = "Task" [ 1436.365338] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.372942] env[69992]: DEBUG oslo_vmware.api [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898120, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.435774] env[69992]: DEBUG oslo_vmware.api [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Task: {'id': task-2898117, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.546705} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.436040] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1436.436243] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1436.436466] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1436.436656] env[69992]: INFO nova.compute.manager [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Took 1.63 seconds to destroy the instance on the hypervisor. 
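The entries above trace the usual VMware destroy path for instance c2268475-6506-4c1f-8f8a-7b8d3a5cb28c: PowerOffVM_Task, then UnregisterVM, then DeleteDatastoreFile_Task, each awaited by polling the vCenter task until it reports completion ("progress is 0%." followed by "completed successfully", with a recorded duration_secs). The sketch below is a minimal, self-contained illustration of that polling pattern only; every name in it (FakeTask, wait_for_task, poll_interval) is hypothetical, and it is not the oslo.vmware or Nova implementation.

    # Illustrative only: a generic "submit task, poll until done" loop mirroring
    # the PowerOffVM_Task / UnregisterVM / DeleteDatastoreFile_Task waits above.
    import time


    class FakeTask:
        """Stand-in for a vCenter task handle such as 'task-2898115'."""

        def __init__(self, task_id, ticks_to_finish=3):
            self.task_id = task_id
            self._ticks = ticks_to_finish

        def poll(self):
            """Return (state, progress); reports success after a few polls."""
            if self._ticks > 0:
                self._ticks -= 1
                return "running", 0
            return "success", 100


    def wait_for_task(task, poll_interval=0.5, timeout=60.0):
        """Poll `task` until it succeeds, analogous to the
        'progress is 0%.' ... 'completed successfully' lines in the log."""
        start = time.monotonic()
        while True:
            state, progress = task.poll()
            if state == "success":
                return time.monotonic() - start  # analogous to duration_secs
            if state == "error":
                raise RuntimeError(f"{task.task_id} failed")
            if time.monotonic() - start > timeout:
                raise TimeoutError(f"{task.task_id} did not finish in {timeout}s")
            print(f"Task {task.task_id} progress is {progress}%.")
            time.sleep(poll_interval)


    if __name__ == "__main__":
        duration = wait_for_task(FakeTask("task-2898115"))
        print(f"Task completed successfully in {duration:.3f}s")
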
[ 1436.437026] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1436.437091] env[69992]: DEBUG nova.compute.manager [-] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1436.437182] env[69992]: DEBUG nova.network.neutron [-] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1436.517838] env[69992]: DEBUG nova.network.neutron [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Successfully created port: 7c7f4aa4-cd49-487f-8637-9ee035bbab41 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1436.558702] env[69992]: DEBUG nova.compute.manager [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1437.365771] env[69992]: DEBUG nova.network.neutron [req-09ab17ce-0728-4ea6-8bab-f10e36dedfb6 req-5e6602c6-f727-4cf5-85b7-37c9dfb68038 service nova] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1437.372020] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: fe20ed11-92f4-4b9f-9e13-68cbbf9d23f0] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1437.372020] env[69992]: DEBUG oslo_concurrency.lockutils [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquiring lock "refresh_cache-9d290fe7-12d2-416e-9608-7a8e7e9b2f65" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1437.372020] env[69992]: DEBUG nova.scheduler.client.report [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1437.377843] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1437.378070] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1437.378280] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1437.378469] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1437.378648] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 
tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1437.384510] env[69992]: INFO nova.compute.manager [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Terminating instance [ 1437.392958] env[69992]: DEBUG oslo_vmware.api [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Task: {'id': task-2898120, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165739} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.396009] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1437.396009] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1437.396009] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1437.396009] env[69992]: INFO nova.compute.manager [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Took 1.67 seconds to destroy the instance on the hypervisor. [ 1437.396009] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1437.396009] env[69992]: DEBUG nova.compute.manager [-] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1437.396009] env[69992]: DEBUG nova.network.neutron [-] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1437.470772] env[69992]: DEBUG nova.network.neutron [req-09ab17ce-0728-4ea6-8bab-f10e36dedfb6 req-5e6602c6-f727-4cf5-85b7-37c9dfb68038 service nova] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1437.883167] env[69992]: DEBUG nova.compute.manager [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1437.889008] env[69992]: DEBUG nova.network.neutron [-] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1437.890778] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1437.890925] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Cleaning up deleted instances with incomplete migration {{(pid=69992) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1437.894360] env[69992]: DEBUG nova.compute.manager [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1437.894360] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1437.895605] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11bfc7bd-a122-4b4e-954c-e64d73ae33d9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.903139] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1437.903513] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-749e73cc-cf83-4ac5-928e-bfac22356cad {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.913017] env[69992]: DEBUG oslo_vmware.api [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1437.913017] env[69992]: value = "task-2898121" [ 1437.913017] env[69992]: _type = "Task" [ 1437.913017] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.924502] env[69992]: DEBUG oslo_vmware.api [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898121, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.968184] env[69992]: DEBUG nova.virt.hardware [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1437.968408] env[69992]: DEBUG nova.virt.hardware [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1437.968595] env[69992]: DEBUG nova.virt.hardware [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1437.968802] env[69992]: DEBUG nova.virt.hardware [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1437.968955] env[69992]: DEBUG nova.virt.hardware [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1437.969138] env[69992]: DEBUG nova.virt.hardware [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1437.969384] env[69992]: DEBUG nova.virt.hardware [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1437.969567] env[69992]: DEBUG nova.virt.hardware [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1437.969749] 
env[69992]: DEBUG nova.virt.hardware [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1437.969922] env[69992]: DEBUG nova.virt.hardware [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1437.970129] env[69992]: DEBUG nova.virt.hardware [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1437.971028] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3f73bc-b298-49cf-945c-c7e8612642f2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.974508] env[69992]: DEBUG oslo_concurrency.lockutils [req-09ab17ce-0728-4ea6-8bab-f10e36dedfb6 req-5e6602c6-f727-4cf5-85b7-37c9dfb68038 service nova] Releasing lock "refresh_cache-9d290fe7-12d2-416e-9608-7a8e7e9b2f65" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1437.975091] env[69992]: DEBUG oslo_concurrency.lockutils [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquired lock "refresh_cache-9d290fe7-12d2-416e-9608-7a8e7e9b2f65" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1437.975226] env[69992]: DEBUG nova.network.neutron [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1437.982463] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1984a020-7a4f-4848-8c75-62b7d67dc955 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.112634] env[69992]: DEBUG nova.network.neutron [-] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1438.121539] env[69992]: DEBUG nova.compute.manager [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Received event network-vif-plugged-3b4a3277-a4e1-4872-87b4-f4fcaadff6bc {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1438.121757] env[69992]: DEBUG oslo_concurrency.lockutils [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] Acquiring lock "9d290fe7-12d2-416e-9608-7a8e7e9b2f65-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1438.121961] env[69992]: DEBUG oslo_concurrency.lockutils [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] Lock "9d290fe7-12d2-416e-9608-7a8e7e9b2f65-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1438.122143] env[69992]: DEBUG oslo_concurrency.lockutils [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] Lock "9d290fe7-12d2-416e-9608-7a8e7e9b2f65-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1438.122310] env[69992]: DEBUG nova.compute.manager [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] No waiting events found dispatching network-vif-plugged-3b4a3277-a4e1-4872-87b4-f4fcaadff6bc {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1438.122473] env[69992]: WARNING nova.compute.manager [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Received unexpected event network-vif-plugged-3b4a3277-a4e1-4872-87b4-f4fcaadff6bc for instance with vm_state building and task_state spawning. [ 1438.122635] env[69992]: DEBUG nova.compute.manager [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Received event network-changed-3b4a3277-a4e1-4872-87b4-f4fcaadff6bc {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1438.122789] env[69992]: DEBUG nova.compute.manager [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Refreshing instance network info cache due to event network-changed-3b4a3277-a4e1-4872-87b4-f4fcaadff6bc. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1438.122953] env[69992]: DEBUG oslo_concurrency.lockutils [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] Acquiring lock "refresh_cache-9d290fe7-12d2-416e-9608-7a8e7e9b2f65" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1438.147655] env[69992]: DEBUG nova.network.neutron [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Successfully updated port: 7c7f4aa4-cd49-487f-8637-9ee035bbab41 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1438.391497] env[69992]: DEBUG oslo_concurrency.lockutils [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.841s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1438.394748] env[69992]: INFO nova.compute.manager [-] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Took 1.96 seconds to deallocate network for instance. [ 1438.395105] env[69992]: DEBUG oslo_concurrency.lockutils [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.417s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1438.396564] env[69992]: INFO nova.compute.claims [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1438.400863] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1438.422762] env[69992]: DEBUG oslo_vmware.api [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898121, 'name': PowerOffVM_Task, 'duration_secs': 0.292206} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.422996] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1438.423186] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1438.423490] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4a3d3976-e42a-4f7a-918a-9aa607c03158 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.509987] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1438.510239] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1438.510419] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Deleting the datastore file [datastore2] 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1438.510695] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-936590f5-5043-4db6-8087-37c2a11aa2c0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.516819] env[69992]: DEBUG oslo_vmware.api [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for the task: (returnval){ [ 1438.516819] env[69992]: value = "task-2898123" [ 1438.516819] env[69992]: _type = "Task" [ 1438.516819] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.524752] env[69992]: DEBUG oslo_vmware.api [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898123, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.618608] env[69992]: DEBUG nova.network.neutron [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1438.620674] env[69992]: INFO nova.compute.manager [-] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Took 1.23 seconds to deallocate network for instance. [ 1438.650254] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "refresh_cache-5df7d031-66bf-43eb-a05b-07b6cff9db59" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1438.650381] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquired lock "refresh_cache-5df7d031-66bf-43eb-a05b-07b6cff9db59" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1438.650709] env[69992]: DEBUG nova.network.neutron [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1438.912524] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1438.976542] env[69992]: INFO nova.scheduler.client.report [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Deleted allocation for migration b98c3a7b-4cb1-4393-bfc7-46c47f19ce94 [ 1439.028023] env[69992]: DEBUG oslo_vmware.api [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Task: {'id': task-2898123, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137429} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.028023] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1439.028169] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1439.028282] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1439.028447] env[69992]: INFO nova.compute.manager [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1439.028677] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1439.032017] env[69992]: DEBUG nova.compute.manager [-] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1439.032017] env[69992]: DEBUG nova.network.neutron [-] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1439.127399] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1439.159589] env[69992]: DEBUG nova.network.neutron [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Updating instance_info_cache with network_info: [{"id": "5e4cacd9-f025-4a13-8d5b-615b3c9e12a3", "address": "fa:16:3e:c3:fe:08", "network": {"id": "4a5b0621-bdd9-45b6-90b7-f6389406ef1e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-414485365", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.245", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d5c32fe8b254c5abdd4123bd2088353", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e4cacd9-f0", "ovs_interfaceid": "5e4cacd9-f025-4a13-8d5b-615b3c9e12a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3b4a3277-a4e1-4872-87b4-f4fcaadff6bc", "address": "fa:16:3e:5a:7c:16", "network": {"id": "e51cea75-4a77-4eac-9933-0254ff044517", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2066694832", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.194", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8d5c32fe8b254c5abdd4123bd2088353", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bff6c3a1-cc80-46ca-86c0-6dbb029edddb", "external-id": "nsx-vlan-transportzone-223", "segmentation_id": 223, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b4a3277-a4", "ovs_interfaceid": "3b4a3277-a4e1-4872-87b4-f4fcaadff6bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1439.199162] env[69992]: DEBUG nova.network.neutron [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1439.349715] env[69992]: DEBUG nova.network.neutron [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Updating instance_info_cache with network_info: [{"id": "7c7f4aa4-cd49-487f-8637-9ee035bbab41", "address": "fa:16:3e:6a:b0:aa", "network": {"id": "7b76ab15-e15a-4e22-ba38-bffeba7c4ff6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1461551189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4da04b8933ad4d2ba4b1c193853f31b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c7f4aa4-cd", "ovs_interfaceid": "7c7f4aa4-cd49-487f-8637-9ee035bbab41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1439.482511] env[69992]: DEBUG oslo_concurrency.lockutils [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "d50d7460-2b70-45bc-940f-7d45f329fa1c" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 8.118s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1439.545480] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f3c253-a1cd-4dd6-9eff-bfbee55ec074 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.553255] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d511832-5b23-49e8-b899-aa185665532b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.584513] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b515a0-d59e-408d-97e0-0b07ea18753d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.591693] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87dc212f-d827-4b82-bcfd-ffc2f4de70d1 {{(pid=69992) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.605291] env[69992]: DEBUG nova.compute.provider_tree [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1439.662683] env[69992]: DEBUG oslo_concurrency.lockutils [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Releasing lock "refresh_cache-9d290fe7-12d2-416e-9608-7a8e7e9b2f65" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1439.663052] env[69992]: DEBUG nova.compute.manager [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Instance network_info: |[{"id": "5e4cacd9-f025-4a13-8d5b-615b3c9e12a3", "address": "fa:16:3e:c3:fe:08", "network": {"id": "4a5b0621-bdd9-45b6-90b7-f6389406ef1e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-414485365", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.245", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d5c32fe8b254c5abdd4123bd2088353", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e4cacd9-f0", "ovs_interfaceid": "5e4cacd9-f025-4a13-8d5b-615b3c9e12a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3b4a3277-a4e1-4872-87b4-f4fcaadff6bc", "address": "fa:16:3e:5a:7c:16", "network": {"id": "e51cea75-4a77-4eac-9933-0254ff044517", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2066694832", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.194", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8d5c32fe8b254c5abdd4123bd2088353", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bff6c3a1-cc80-46ca-86c0-6dbb029edddb", "external-id": "nsx-vlan-transportzone-223", "segmentation_id": 223, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b4a3277-a4", "ovs_interfaceid": "3b4a3277-a4e1-4872-87b4-f4fcaadff6bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 
1439.663401] env[69992]: DEBUG oslo_concurrency.lockutils [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] Acquired lock "refresh_cache-9d290fe7-12d2-416e-9608-7a8e7e9b2f65" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1439.663586] env[69992]: DEBUG nova.network.neutron [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Refreshing network info cache for port 3b4a3277-a4e1-4872-87b4-f4fcaadff6bc {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1439.664786] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:fe:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '209639b9-c313-4b35-86dc-dccd744d174a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5e4cacd9-f025-4a13-8d5b-615b3c9e12a3', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:7c:16', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bff6c3a1-cc80-46ca-86c0-6dbb029edddb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3b4a3277-a4e1-4872-87b4-f4fcaadff6bc', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1439.679716] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1439.684663] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1439.685325] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2ee08616-185f-4351-93e9-4c895405dd38 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.707992] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1439.707992] env[69992]: value = "task-2898124" [ 1439.707992] env[69992]: _type = "Task" [ 1439.707992] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.715837] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898124, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.817453] env[69992]: DEBUG nova.network.neutron [-] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1439.852768] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Releasing lock "refresh_cache-5df7d031-66bf-43eb-a05b-07b6cff9db59" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1439.853078] env[69992]: DEBUG nova.compute.manager [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Instance network_info: |[{"id": "7c7f4aa4-cd49-487f-8637-9ee035bbab41", "address": "fa:16:3e:6a:b0:aa", "network": {"id": "7b76ab15-e15a-4e22-ba38-bffeba7c4ff6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1461551189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4da04b8933ad4d2ba4b1c193853f31b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c7f4aa4-cd", "ovs_interfaceid": "7c7f4aa4-cd49-487f-8637-9ee035bbab41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1439.853531] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6a:b0:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7b83383f-ed7a-4efd-aef7-aa8c15649d07', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c7f4aa4-cd49-487f-8637-9ee035bbab41', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1439.861365] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1439.861930] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1439.861930] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-16f77d6f-1626-4986-b2d5-c2f8593acd3f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.883182] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1439.883182] env[69992]: value = "task-2898125" [ 1439.883182] env[69992]: _type = "Task" [ 1439.883182] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.890856] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e60fac0f-952c-44d3-83cc-2a9ee6a83f31 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "9464339a-b760-47e9-bc75-e88ce18bf71b" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1439.891052] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e60fac0f-952c-44d3-83cc-2a9ee6a83f31 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "9464339a-b760-47e9-bc75-e88ce18bf71b" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1439.892273] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898125, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.929747] env[69992]: DEBUG nova.network.neutron [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Updated VIF entry in instance network info cache for port 3b4a3277-a4e1-4872-87b4-f4fcaadff6bc. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1439.930206] env[69992]: DEBUG nova.network.neutron [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Updating instance_info_cache with network_info: [{"id": "5e4cacd9-f025-4a13-8d5b-615b3c9e12a3", "address": "fa:16:3e:c3:fe:08", "network": {"id": "4a5b0621-bdd9-45b6-90b7-f6389406ef1e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-414485365", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.245", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d5c32fe8b254c5abdd4123bd2088353", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e4cacd9-f0", "ovs_interfaceid": "5e4cacd9-f025-4a13-8d5b-615b3c9e12a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3b4a3277-a4e1-4872-87b4-f4fcaadff6bc", "address": "fa:16:3e:5a:7c:16", "network": {"id": "e51cea75-4a77-4eac-9933-0254ff044517", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2066694832", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.194", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8d5c32fe8b254c5abdd4123bd2088353", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bff6c3a1-cc80-46ca-86c0-6dbb029edddb", "external-id": "nsx-vlan-transportzone-223", "segmentation_id": 223, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b4a3277-a4", "ovs_interfaceid": "3b4a3277-a4e1-4872-87b4-f4fcaadff6bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1440.108863] env[69992]: DEBUG nova.scheduler.client.report [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1440.147383] env[69992]: 
DEBUG nova.compute.manager [req-6257e337-ed2c-4c9d-b916-c7461defaf51 req-67bc30d9-bf95-4684-bf0d-268c6eb7b257 service nova] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Received event network-changed-7c7f4aa4-cd49-487f-8637-9ee035bbab41 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1440.147707] env[69992]: DEBUG nova.compute.manager [req-6257e337-ed2c-4c9d-b916-c7461defaf51 req-67bc30d9-bf95-4684-bf0d-268c6eb7b257 service nova] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Refreshing instance network info cache due to event network-changed-7c7f4aa4-cd49-487f-8637-9ee035bbab41. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1440.147874] env[69992]: DEBUG oslo_concurrency.lockutils [req-6257e337-ed2c-4c9d-b916-c7461defaf51 req-67bc30d9-bf95-4684-bf0d-268c6eb7b257 service nova] Acquiring lock "refresh_cache-5df7d031-66bf-43eb-a05b-07b6cff9db59" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.148021] env[69992]: DEBUG oslo_concurrency.lockutils [req-6257e337-ed2c-4c9d-b916-c7461defaf51 req-67bc30d9-bf95-4684-bf0d-268c6eb7b257 service nova] Acquired lock "refresh_cache-5df7d031-66bf-43eb-a05b-07b6cff9db59" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1440.148186] env[69992]: DEBUG nova.network.neutron [req-6257e337-ed2c-4c9d-b916-c7461defaf51 req-67bc30d9-bf95-4684-bf0d-268c6eb7b257 service nova] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Refreshing network info cache for port 7c7f4aa4-cd49-487f-8637-9ee035bbab41 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1440.218238] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898124, 'name': CreateVM_Task, 'duration_secs': 0.442354} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.218409] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1440.219194] env[69992]: DEBUG oslo_concurrency.lockutils [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.219368] env[69992]: DEBUG oslo_concurrency.lockutils [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1440.219689] env[69992]: DEBUG oslo_concurrency.lockutils [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1440.219938] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b666768c-80a9-4bdb-8379-7d4f534b6a35 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.225019] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for the task: (returnval){ [ 1440.225019] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]529a0506-fc16-930f-0a7a-216bd6a06d9f" [ 1440.225019] env[69992]: _type = "Task" [ 1440.225019] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.232927] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]529a0506-fc16-930f-0a7a-216bd6a06d9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.319671] env[69992]: INFO nova.compute.manager [-] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Took 1.29 seconds to deallocate network for instance. [ 1440.396826] env[69992]: INFO nova.compute.manager [None req-e60fac0f-952c-44d3-83cc-2a9ee6a83f31 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Detaching volume 38a23a44-927a-49f0-af50-0d71be5adb30 [ 1440.398799] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898125, 'name': CreateVM_Task, 'duration_secs': 0.339145} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.399141] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1440.399751] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.432779] env[69992]: INFO nova.virt.block_device [None req-e60fac0f-952c-44d3-83cc-2a9ee6a83f31 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Attempting to driver detach volume 38a23a44-927a-49f0-af50-0d71be5adb30 from mountpoint /dev/sdb [ 1440.433031] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e60fac0f-952c-44d3-83cc-2a9ee6a83f31 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Volume detach. Driver type: vmdk {{(pid=69992) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1440.433230] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e60fac0f-952c-44d3-83cc-2a9ee6a83f31 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582109', 'volume_id': '38a23a44-927a-49f0-af50-0d71be5adb30', 'name': 'volume-38a23a44-927a-49f0-af50-0d71be5adb30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9464339a-b760-47e9-bc75-e88ce18bf71b', 'attached_at': '', 'detached_at': '', 'volume_id': '38a23a44-927a-49f0-af50-0d71be5adb30', 'serial': '38a23a44-927a-49f0-af50-0d71be5adb30'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1440.434209] env[69992]: DEBUG oslo_concurrency.lockutils [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] Releasing lock "refresh_cache-9d290fe7-12d2-416e-9608-7a8e7e9b2f65" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1440.434209] env[69992]: DEBUG nova.compute.manager [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Received event network-vif-deleted-2938c230-8b81-4b8d-96c2-6cf38f5d97af {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1440.434463] env[69992]: DEBUG nova.compute.manager [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Received event network-vif-deleted-80bd277f-8072-43a4-a5a0-6c9f7e01f1a8 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1440.434531] env[69992]: INFO nova.compute.manager [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] [instance: 
fcbe1142-72dc-4a02-af9b-e03a2031a247] Neutron deleted interface 80bd277f-8072-43a4-a5a0-6c9f7e01f1a8; detaching it from the instance and deleting it from the info cache [ 1440.434690] env[69992]: DEBUG nova.network.neutron [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1440.436891] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e4a870-c3ff-48c6-888d-f8e23af5e052 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.460425] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38cbf1de-c8e9-45f2-b671-944d4cfa6186 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.470680] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ebc26a1-a0db-4043-8b3d-f2eab1cbe4a8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.496851] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f80e2f-8678-48ee-b729-130bf6c7f323 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.511235] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e60fac0f-952c-44d3-83cc-2a9ee6a83f31 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] The volume has not been displaced from its original location: [datastore1] volume-38a23a44-927a-49f0-af50-0d71be5adb30/volume-38a23a44-927a-49f0-af50-0d71be5adb30.vmdk. No consolidation needed. {{(pid=69992) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1440.516973] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e60fac0f-952c-44d3-83cc-2a9ee6a83f31 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Reconfiguring VM instance instance-00000061 to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1440.517267] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c4c3903-08b6-417c-b1ba-fc31f7406118 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.535474] env[69992]: DEBUG oslo_vmware.api [None req-e60fac0f-952c-44d3-83cc-2a9ee6a83f31 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1440.535474] env[69992]: value = "task-2898126" [ 1440.535474] env[69992]: _type = "Task" [ 1440.535474] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.542925] env[69992]: DEBUG oslo_vmware.api [None req-e60fac0f-952c-44d3-83cc-2a9ee6a83f31 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898126, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.613443] env[69992]: DEBUG oslo_concurrency.lockutils [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.218s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1440.613989] env[69992]: DEBUG nova.compute.manager [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1440.616694] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.704s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1440.616932] env[69992]: DEBUG nova.objects.instance [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lazy-loading 'resources' on Instance uuid c2268475-6506-4c1f-8f8a-7b8d3a5cb28c {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1440.735755] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]529a0506-fc16-930f-0a7a-216bd6a06d9f, 'name': SearchDatastore_Task, 'duration_secs': 0.008942} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.738263] env[69992]: DEBUG oslo_concurrency.lockutils [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1440.738504] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1440.738735] env[69992]: DEBUG oslo_concurrency.lockutils [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.738911] env[69992]: DEBUG oslo_concurrency.lockutils [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1440.739091] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1440.739375] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1440.739679] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1440.739899] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9eb19b68-b7de-4dfb-9e8b-682d61bfebd3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.741943] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-296763d7-6fb1-4bc6-8c8b-54712405aee9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.747630] env[69992]: DEBUG oslo_vmware.api [None 
req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1440.747630] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528e7f3a-c863-c89b-4213-f370babb30de" [ 1440.747630] env[69992]: _type = "Task" [ 1440.747630] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.751407] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1440.751499] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1440.752481] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee0ff963-4bc5-4a38-99df-d7f48778764d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.757235] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528e7f3a-c863-c89b-4213-f370babb30de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.761611] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for the task: (returnval){ [ 1440.761611] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5270f364-29ed-7c95-9bd5-05c343389643" [ 1440.761611] env[69992]: _type = "Task" [ 1440.761611] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.768220] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5270f364-29ed-7c95-9bd5-05c343389643, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.827687] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1440.859315] env[69992]: DEBUG nova.network.neutron [req-6257e337-ed2c-4c9d-b916-c7461defaf51 req-67bc30d9-bf95-4684-bf0d-268c6eb7b257 service nova] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Updated VIF entry in instance network info cache for port 7c7f4aa4-cd49-487f-8637-9ee035bbab41. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1440.859703] env[69992]: DEBUG nova.network.neutron [req-6257e337-ed2c-4c9d-b916-c7461defaf51 req-67bc30d9-bf95-4684-bf0d-268c6eb7b257 service nova] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Updating instance_info_cache with network_info: [{"id": "7c7f4aa4-cd49-487f-8637-9ee035bbab41", "address": "fa:16:3e:6a:b0:aa", "network": {"id": "7b76ab15-e15a-4e22-ba38-bffeba7c4ff6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1461551189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4da04b8933ad4d2ba4b1c193853f31b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c7f4aa4-cd", "ovs_interfaceid": "7c7f4aa4-cd49-487f-8637-9ee035bbab41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1440.892472] env[69992]: DEBUG oslo_concurrency.lockutils [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "d50d7460-2b70-45bc-940f-7d45f329fa1c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1440.892703] env[69992]: DEBUG oslo_concurrency.lockutils [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "d50d7460-2b70-45bc-940f-7d45f329fa1c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1440.892911] env[69992]: DEBUG oslo_concurrency.lockutils [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "d50d7460-2b70-45bc-940f-7d45f329fa1c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1440.893117] env[69992]: DEBUG oslo_concurrency.lockutils [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "d50d7460-2b70-45bc-940f-7d45f329fa1c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1440.893328] env[69992]: DEBUG oslo_concurrency.lockutils [None 
req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "d50d7460-2b70-45bc-940f-7d45f329fa1c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1440.895504] env[69992]: INFO nova.compute.manager [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Terminating instance [ 1440.911061] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1440.940531] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6f1032e6-7c08-4dab-9376-fe5a5b71d4c5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.952267] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532f8dc2-07ce-4f52-819e-f9a51a9a8d96 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.981950] env[69992]: DEBUG nova.compute.manager [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Detach interface failed, port_id=80bd277f-8072-43a4-a5a0-6c9f7e01f1a8, reason: Instance fcbe1142-72dc-4a02-af9b-e03a2031a247 could not be found. 
{{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1440.982207] env[69992]: DEBUG nova.compute.manager [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Received event network-vif-plugged-7c7f4aa4-cd49-487f-8637-9ee035bbab41 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1440.982415] env[69992]: DEBUG oslo_concurrency.lockutils [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] Acquiring lock "5df7d031-66bf-43eb-a05b-07b6cff9db59-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1440.982629] env[69992]: DEBUG oslo_concurrency.lockutils [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] Lock "5df7d031-66bf-43eb-a05b-07b6cff9db59-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1440.982797] env[69992]: DEBUG oslo_concurrency.lockutils [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] Lock "5df7d031-66bf-43eb-a05b-07b6cff9db59-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1440.982962] env[69992]: DEBUG nova.compute.manager [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] No waiting events found dispatching network-vif-plugged-7c7f4aa4-cd49-487f-8637-9ee035bbab41 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1440.983141] env[69992]: WARNING nova.compute.manager [req-cb13476f-5d79-42b6-b049-cf541f01abbe req-818e0947-49cc-4257-a8c4-d0c3f75b4889 service nova] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Received unexpected event network-vif-plugged-7c7f4aa4-cd49-487f-8637-9ee035bbab41 for instance with vm_state building and task_state spawning. [ 1441.045288] env[69992]: DEBUG oslo_vmware.api [None req-e60fac0f-952c-44d3-83cc-2a9ee6a83f31 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898126, 'name': ReconfigVM_Task, 'duration_secs': 0.224769} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.045560] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e60fac0f-952c-44d3-83cc-2a9ee6a83f31 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Reconfigured VM instance instance-00000061 to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1441.050152] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f64a823-bca2-4b26-ad25-d183ed46409e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.066015] env[69992]: DEBUG oslo_vmware.api [None req-e60fac0f-952c-44d3-83cc-2a9ee6a83f31 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1441.066015] env[69992]: value = "task-2898127" [ 1441.066015] env[69992]: _type = "Task" [ 1441.066015] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.073330] env[69992]: DEBUG oslo_vmware.api [None req-e60fac0f-952c-44d3-83cc-2a9ee6a83f31 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898127, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.122854] env[69992]: DEBUG nova.compute.utils [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1441.125104] env[69992]: DEBUG nova.compute.manager [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Not allocating networking since 'none' was specified. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1441.252785] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3cca4a-ba3a-46d3-b595-29025d845b9f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.260930] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528e7f3a-c863-c89b-4213-f370babb30de, 'name': SearchDatastore_Task, 'duration_secs': 0.008858} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.262644] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1441.262895] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1441.263135] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1441.267038] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1cd5767-5c41-43a6-9369-62f841f15c54 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.276067] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5270f364-29ed-7c95-9bd5-05c343389643, 'name': SearchDatastore_Task, 'duration_secs': 0.007679} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.304394] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cb77521-ff72-46ad-b9c9-5765b780db2e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.307691] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a44848-cf76-4c54-ad2f-c33a0dbc83d9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.316522] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7906a275-d8a5-412c-b405-7e4c4d1d3799 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.320335] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for the task: (returnval){ [ 1441.320335] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]527027a5-8b37-46f1-c254-6168a1418aee" [ 1441.320335] env[69992]: _type = "Task" [ 1441.320335] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.332298] env[69992]: DEBUG nova.compute.provider_tree [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1441.338157] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]527027a5-8b37-46f1-c254-6168a1418aee, 'name': SearchDatastore_Task, 'duration_secs': 0.009522} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.338397] env[69992]: DEBUG oslo_concurrency.lockutils [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1441.338647] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 9d290fe7-12d2-416e-9608-7a8e7e9b2f65/9d290fe7-12d2-416e-9608-7a8e7e9b2f65.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1441.338898] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1441.339093] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1441.339287] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f449c539-8e32-431a-a522-3934d11e873f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.341404] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7eb437ad-f703-456f-a161-47059518ad35 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.347124] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for the task: (returnval){ [ 1441.347124] env[69992]: value = "task-2898128" [ 1441.347124] 
env[69992]: _type = "Task" [ 1441.347124] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.350718] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1441.350892] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1441.351804] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d408f2cb-a524-4304-a9cd-076cf0f009fb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.356527] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898128, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.359800] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1441.359800] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52637789-2e9a-39ed-4772-639afbace223" [ 1441.359800] env[69992]: _type = "Task" [ 1441.359800] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.362897] env[69992]: DEBUG oslo_concurrency.lockutils [req-6257e337-ed2c-4c9d-b916-c7461defaf51 req-67bc30d9-bf95-4684-bf0d-268c6eb7b257 service nova] Releasing lock "refresh_cache-5df7d031-66bf-43eb-a05b-07b6cff9db59" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1441.363175] env[69992]: DEBUG nova.compute.manager [req-6257e337-ed2c-4c9d-b916-c7461defaf51 req-67bc30d9-bf95-4684-bf0d-268c6eb7b257 service nova] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Received event network-vif-deleted-0042c1e4-d906-4261-a18e-ce232533cbdd {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1441.367667] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52637789-2e9a-39ed-4772-639afbace223, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.398912] env[69992]: DEBUG nova.compute.manager [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1441.399707] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1441.400141] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d69a16-1fe4-43a6-9d05-2f09fcf5fe83 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.408991] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1441.409267] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cdea00f2-c5f1-43bc-8d29-9a4abfb93878 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.419045] env[69992]: DEBUG oslo_vmware.api [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1441.419045] env[69992]: value = "task-2898129" [ 1441.419045] env[69992]: _type = "Task" [ 1441.419045] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.419568] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1441.423807] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._sync_power_states {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1441.431210] env[69992]: DEBUG oslo_vmware.api [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898129, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.577772] env[69992]: DEBUG oslo_vmware.api [None req-e60fac0f-952c-44d3-83cc-2a9ee6a83f31 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898127, 'name': ReconfigVM_Task, 'duration_secs': 0.141996} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.578129] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e60fac0f-952c-44d3-83cc-2a9ee6a83f31 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582109', 'volume_id': '38a23a44-927a-49f0-af50-0d71be5adb30', 'name': 'volume-38a23a44-927a-49f0-af50-0d71be5adb30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9464339a-b760-47e9-bc75-e88ce18bf71b', 'attached_at': '', 'detached_at': '', 'volume_id': '38a23a44-927a-49f0-af50-0d71be5adb30', 'serial': '38a23a44-927a-49f0-af50-0d71be5adb30'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1441.626193] env[69992]: DEBUG nova.compute.manager [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1441.835314] env[69992]: DEBUG nova.scheduler.client.report [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1441.856935] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898128, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.868518] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52637789-2e9a-39ed-4772-639afbace223, 'name': SearchDatastore_Task, 'duration_secs': 0.007666} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.869203] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ef54423-2368-43e9-b9b8-02a4b6baa8a2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.873793] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1441.873793] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52952baf-8456-d15b-cb2f-cc8518d95915" [ 1441.873793] env[69992]: _type = "Task" [ 1441.873793] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.880572] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52952baf-8456-d15b-cb2f-cc8518d95915, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.929390] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Getting list of instances from cluster (obj){ [ 1441.929390] env[69992]: value = "domain-c8" [ 1441.929390] env[69992]: _type = "ClusterComputeResource" [ 1441.929390] env[69992]: } {{(pid=69992) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1441.929661] env[69992]: DEBUG oslo_vmware.api [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898129, 'name': PowerOffVM_Task, 'duration_secs': 0.170677} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.930486] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bded0a5-fa18-44ea-a743-5f5e25f5c8aa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.933362] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1441.933527] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1441.933746] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-99181367-4252-402c-9f28-f256654ec738 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.948510] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Got total of 6 instances {{(pid=69992) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1441.948651] env[69992]: WARNING nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] While synchronizing instance power states, found 10 instances in the database and 6 instances on the hypervisor. 
[ 1441.948790] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Triggering sync for uuid fcbe1142-72dc-4a02-af9b-e03a2031a247 {{(pid=69992) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1441.948974] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Triggering sync for uuid 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb {{(pid=69992) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1441.949143] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Triggering sync for uuid 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7 {{(pid=69992) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1441.949294] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Triggering sync for uuid 9464339a-b760-47e9-bc75-e88ce18bf71b {{(pid=69992) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1441.949463] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Triggering sync for uuid d50d7460-2b70-45bc-940f-7d45f329fa1c {{(pid=69992) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1441.949587] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Triggering sync for uuid b7af455d-a3a7-480f-b778-9eb3724fa6f1 {{(pid=69992) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1441.949734] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Triggering sync for uuid c2268475-6506-4c1f-8f8a-7b8d3a5cb28c {{(pid=69992) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1441.949876] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Triggering sync for uuid 9d290fe7-12d2-416e-9608-7a8e7e9b2f65 {{(pid=69992) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1441.950032] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Triggering sync for uuid 5df7d031-66bf-43eb-a05b-07b6cff9db59 {{(pid=69992) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1441.950183] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Triggering sync for uuid 7c8b830a-e89c-4d97-a987-141797aaa55f {{(pid=69992) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1441.950474] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "fcbe1142-72dc-4a02-af9b-e03a2031a247" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1441.950705] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1441.950896] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb" acquired by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1441.951148] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1441.951351] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "9464339a-b760-47e9-bc75-e88ce18bf71b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1441.951545] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "d50d7460-2b70-45bc-940f-7d45f329fa1c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1441.951737] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1441.951912] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1441.952155] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "c2268475-6506-4c1f-8f8a-7b8d3a5cb28c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1441.952401] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "9d290fe7-12d2-416e-9608-7a8e7e9b2f65" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1441.952617] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "5df7d031-66bf-43eb-a05b-07b6cff9db59" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1441.952814] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "7c8b830a-e89c-4d97-a987-141797aaa55f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" 
{{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1441.953608] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63ab6329-7172-4fd3-9e11-98ac5fc69cc8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.956410] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc38d88-2a7e-4891-9f4c-afcda55621ee {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.998977] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1441.999217] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1441.999400] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Deleting the datastore file [datastore2] d50d7460-2b70-45bc-940f-7d45f329fa1c {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1441.999666] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1284715f-0c41-4be4-9cf1-a41644e07707 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.006822] env[69992]: DEBUG oslo_vmware.api [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for the task: (returnval){ [ 1442.006822] env[69992]: value = "task-2898131" [ 1442.006822] env[69992]: _type = "Task" [ 1442.006822] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.014281] env[69992]: DEBUG oslo_vmware.api [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898131, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.126878] env[69992]: DEBUG nova.objects.instance [None req-e60fac0f-952c-44d3-83cc-2a9ee6a83f31 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lazy-loading 'flavor' on Instance uuid 9464339a-b760-47e9-bc75-e88ce18bf71b {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1442.340979] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.724s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1442.343624] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.216s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1442.343850] env[69992]: DEBUG nova.objects.instance [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lazy-loading 'resources' on Instance uuid fcbe1142-72dc-4a02-af9b-e03a2031a247 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1442.357989] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898128, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509828} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.358247] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 9d290fe7-12d2-416e-9608-7a8e7e9b2f65/9d290fe7-12d2-416e-9608-7a8e7e9b2f65.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1442.358455] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1442.359358] env[69992]: INFO nova.scheduler.client.report [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Deleted allocations for instance c2268475-6506-4c1f-8f8a-7b8d3a5cb28c [ 1442.360284] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5922fe9c-7f25-4500-b817-15be9a123b59 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.369411] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for the task: (returnval){ [ 1442.369411] env[69992]: value = "task-2898132" [ 1442.369411] env[69992]: _type = "Task" [ 1442.369411] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.380279] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898132, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.386103] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52952baf-8456-d15b-cb2f-cc8518d95915, 'name': SearchDatastore_Task, 'duration_secs': 0.007847} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.386456] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1442.386761] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 5df7d031-66bf-43eb-a05b-07b6cff9db59/5df7d031-66bf-43eb-a05b-07b6cff9db59.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1442.387034] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2305909a-ab77-4326-85b1-81de18f58ecb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.393636] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1442.393636] env[69992]: value = "task-2898133" [ 1442.393636] env[69992]: _type = "Task" [ 1442.393636] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.400984] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898133, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.463978] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.513s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1442.467546] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.515s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1442.516483] env[69992]: DEBUG oslo_vmware.api [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Task: {'id': task-2898131, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165533} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.516763] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1442.516947] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1442.517098] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1442.517280] env[69992]: INFO nova.compute.manager [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1442.517527] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1442.517729] env[69992]: DEBUG nova.compute.manager [-] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1442.517817] env[69992]: DEBUG nova.network.neutron [-] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1442.638920] env[69992]: DEBUG nova.compute.manager [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1442.667424] env[69992]: DEBUG nova.virt.hardware [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1442.667675] env[69992]: DEBUG nova.virt.hardware [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1442.667833] env[69992]: DEBUG nova.virt.hardware [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1442.668032] env[69992]: DEBUG nova.virt.hardware [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1442.668188] env[69992]: DEBUG nova.virt.hardware [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1442.668337] env[69992]: DEBUG nova.virt.hardware [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1442.668546] env[69992]: DEBUG nova.virt.hardware [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1442.668750] env[69992]: DEBUG nova.virt.hardware [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1442.668871] env[69992]: DEBUG nova.virt.hardware [None 
req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1442.669043] env[69992]: DEBUG nova.virt.hardware [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1442.669220] env[69992]: DEBUG nova.virt.hardware [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1442.670097] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b5fb24-b027-4afe-a5a4-bbb4953b4b50 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.678342] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccbf7f3f-1141-4158-bee5-f2141e84fd70 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.693874] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Instance VIF info [] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1442.699628] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Creating folder: Project (f286d3b663844f729d071d6d4d4e9073). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1442.699955] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e032ea2-3d32-44d7-956a-c2f2a40d6f2e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.709782] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Created folder: Project (f286d3b663844f729d071d6d4d4e9073) in parent group-v581821. [ 1442.710099] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Creating folder: Instances. Parent ref: group-v582133. 
{{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1442.710381] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-38307fa0-86f4-4e9c-ae33-becd8c7dfc5e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.718784] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Created folder: Instances in parent group-v582133. [ 1442.719060] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1442.719315] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1442.719579] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-24f8a7e3-112d-4a5c-be35-b2366267d8dc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.738239] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1442.738239] env[69992]: value = "task-2898136" [ 1442.738239] env[69992]: _type = "Task" [ 1442.738239] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.746649] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898136, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.869123] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0ecc169f-5782-4611-b95c-4bd2c72c9650 tempest-ImagesOneServerNegativeTestJSON-1532545984 tempest-ImagesOneServerNegativeTestJSON-1532545984-project-member] Lock "c2268475-6506-4c1f-8f8a-7b8d3a5cb28c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.581s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1442.870788] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "c2268475-6506-4c1f-8f8a-7b8d3a5cb28c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.918s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1442.874700] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e0b7b541-7c49-48be-bc8a-2ef34c78f3c6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.887067] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898132, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.249661} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.889252] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1442.890841] env[69992]: DEBUG nova.compute.manager [req-baf60e00-cddd-4fbf-a73b-2da1bfe1046a req-aa65849b-6d16-4817-a74d-81cb82ca5169 service nova] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Received event network-vif-deleted-0b3a8c02-b431-4538-b679-fba08b7e9e8e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1442.891022] env[69992]: INFO nova.compute.manager [req-baf60e00-cddd-4fbf-a73b-2da1bfe1046a req-aa65849b-6d16-4817-a74d-81cb82ca5169 service nova] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Neutron deleted interface 0b3a8c02-b431-4538-b679-fba08b7e9e8e; detaching it from the instance and deleting it from the info cache [ 1442.891110] env[69992]: DEBUG nova.network.neutron [req-baf60e00-cddd-4fbf-a73b-2da1bfe1046a req-aa65849b-6d16-4817-a74d-81cb82ca5169 service nova] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1442.897095] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df6978d-b1ec-4600-804c-065a435b9e04 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.903195] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c6b06b5-0f2f-4aad-99bb-c6e7fa2047b9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.946278] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 9d290fe7-12d2-416e-9608-7a8e7e9b2f65/9d290fe7-12d2-416e-9608-7a8e7e9b2f65.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1442.961956] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efea2f57-fe42-4186-9102-f23b7fb66931 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.977310] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898133, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.988369] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for the task: (returnval){ [ 1442.988369] env[69992]: value = "task-2898137" [ 1442.988369] env[69992]: _type = "Task" [ 1442.988369] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.006168] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898137, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.072543] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-934e90dc-814d-4ec2-a075-b628f8d97f8b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.080154] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de3d128f-27a5-4c30-a4e5-2238fb8f5b5f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.111697] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb7e2bf-23af-4679-a70f-0ddabc969f59 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.119615] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb2aa74-f481-4011-851b-ac1d4f6948ff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.134698] env[69992]: DEBUG nova.compute.provider_tree [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1443.142858] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e60fac0f-952c-44d3-83cc-2a9ee6a83f31 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "9464339a-b760-47e9-bc75-e88ce18bf71b" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.252s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1443.144426] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "9464339a-b760-47e9-bc75-e88ce18bf71b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 1.193s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1443.145476] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab1c093-f3f2-4c7a-9d32-86334351bbb4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.248391] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898136, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.369708] env[69992]: DEBUG nova.network.neutron [-] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1443.394398] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b67b214c-f958-4f75-ada1-7ccd3f4c3569 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.404720] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d11afc6-4902-4b06-ac7f-2f55660aebd1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.424391] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898133, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.672246} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.424635] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 5df7d031-66bf-43eb-a05b-07b6cff9db59/5df7d031-66bf-43eb-a05b-07b6cff9db59.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1443.424842] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1443.425083] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8e6d381b-4963-4634-ae6f-f9f6515339ff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.436357] env[69992]: DEBUG nova.compute.manager [req-baf60e00-cddd-4fbf-a73b-2da1bfe1046a req-aa65849b-6d16-4817-a74d-81cb82ca5169 service nova] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Detach interface failed, port_id=0b3a8c02-b431-4538-b679-fba08b7e9e8e, reason: Instance d50d7460-2b70-45bc-940f-7d45f329fa1c could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1443.437690] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1443.437690] env[69992]: value = "task-2898138" [ 1443.437690] env[69992]: _type = "Task" [ 1443.437690] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.480978] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "c2268475-6506-4c1f-8f8a-7b8d3a5cb28c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.611s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1443.498888] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898137, 'name': ReconfigVM_Task, 'duration_secs': 0.330691} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.499153] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 9d290fe7-12d2-416e-9608-7a8e7e9b2f65/9d290fe7-12d2-416e-9608-7a8e7e9b2f65.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1443.499823] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e6cc3d03-4c1b-4c8c-b521-b4a577379ad4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.506252] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for the task: (returnval){ [ 1443.506252] env[69992]: value = "task-2898139" [ 1443.506252] env[69992]: _type = "Task" [ 1443.506252] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.514540] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898139, 'name': Rename_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.637941] env[69992]: DEBUG nova.scheduler.client.report [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1443.654842] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "9464339a-b760-47e9-bc75-e88ce18bf71b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.510s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1443.752585] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898136, 'name': CreateVM_Task, 'duration_secs': 0.53208} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.752875] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1443.753205] env[69992]: DEBUG oslo_concurrency.lockutils [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1443.754178] env[69992]: DEBUG oslo_concurrency.lockutils [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1443.754178] env[69992]: DEBUG oslo_concurrency.lockutils [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1443.754178] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-333dd47d-080f-4575-a095-c8858d39074d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.759676] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Waiting for the task: (returnval){ [ 1443.759676] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52e117b4-56cd-5ed4-4e17-35d11dd40175" 
[ 1443.759676] env[69992]: _type = "Task" [ 1443.759676] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.767501] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e117b4-56cd-5ed4-4e17-35d11dd40175, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.872043] env[69992]: INFO nova.compute.manager [-] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Took 1.35 seconds to deallocate network for instance. [ 1443.947636] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898138, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078623} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.948022] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1443.948611] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1d12f1-14f2-498d-b8e1-f4a99e2f7572 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.969876] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 5df7d031-66bf-43eb-a05b-07b6cff9db59/5df7d031-66bf-43eb-a05b-07b6cff9db59.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1443.970300] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-078c50c6-69dd-4710-b692-ad5adb5b6d0c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.990365] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1443.990365] env[69992]: value = "task-2898140" [ 1443.990365] env[69992]: _type = "Task" [ 1443.990365] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.000721] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898140, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.014476] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898139, 'name': Rename_Task, 'duration_secs': 0.175853} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.014737] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1444.015038] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2dc240c6-46ca-40a9-9b61-127b42464ffc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.023269] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for the task: (returnval){ [ 1444.023269] env[69992]: value = "task-2898141" [ 1444.023269] env[69992]: _type = "Task" [ 1444.023269] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.030770] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898141, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.143498] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.800s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1444.145904] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.318s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1444.146155] env[69992]: DEBUG nova.objects.instance [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lazy-loading 'resources' on Instance uuid 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1444.168364] env[69992]: INFO nova.scheduler.client.report [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Deleted allocations for instance fcbe1142-72dc-4a02-af9b-e03a2031a247 [ 1444.270429] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52e117b4-56cd-5ed4-4e17-35d11dd40175, 'name': SearchDatastore_Task, 'duration_secs': 0.009593} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.270732] env[69992]: DEBUG oslo_concurrency.lockutils [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1444.270968] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1444.271224] env[69992]: DEBUG oslo_concurrency.lockutils [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1444.271376] env[69992]: DEBUG oslo_concurrency.lockutils [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1444.271556] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1444.271924] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "9464339a-b760-47e9-bc75-e88ce18bf71b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1444.272163] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "9464339a-b760-47e9-bc75-e88ce18bf71b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1444.272369] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "9464339a-b760-47e9-bc75-e88ce18bf71b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1444.272549] env[69992]: DEBUG 
oslo_concurrency.lockutils [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "9464339a-b760-47e9-bc75-e88ce18bf71b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1444.272713] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "9464339a-b760-47e9-bc75-e88ce18bf71b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1444.275078] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b79e5f38-b630-4b00-bebf-28ede3d95f15 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.277733] env[69992]: INFO nova.compute.manager [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Terminating instance [ 1444.286494] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1444.286867] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1444.287923] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73f263e5-7d25-4384-8682-87395250a235 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.294385] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Waiting for the task: (returnval){ [ 1444.294385] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52ec906a-b1b5-2ce6-41d1-116c63b07e81" [ 1444.294385] env[69992]: _type = "Task" [ 1444.294385] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.303805] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ec906a-b1b5-2ce6-41d1-116c63b07e81, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.379191] env[69992]: DEBUG oslo_concurrency.lockutils [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1444.503579] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898140, 'name': ReconfigVM_Task, 'duration_secs': 0.2784} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.503777] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 5df7d031-66bf-43eb-a05b-07b6cff9db59/5df7d031-66bf-43eb-a05b-07b6cff9db59.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1444.504444] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-be8bd7e5-af95-49b6-b480-8fd3b3847ff1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.511209] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1444.511209] env[69992]: value = "task-2898142" [ 1444.511209] env[69992]: _type = "Task" [ 1444.511209] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.519838] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898142, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.535933] env[69992]: DEBUG oslo_vmware.api [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898141, 'name': PowerOnVM_Task, 'duration_secs': 0.492811} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.536341] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1444.536643] env[69992]: INFO nova.compute.manager [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Took 11.08 seconds to spawn the instance on the hypervisor. [ 1444.536857] env[69992]: DEBUG nova.compute.manager [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1444.537659] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2166e583-6f93-401a-8ab5-89bfbd225649 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.680592] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71778acd-16a9-4cbd-bf5c-3424f3602619 tempest-ServersTestJSON-1985445483 tempest-ServersTestJSON-1985445483-project-member] Lock "fcbe1142-72dc-4a02-af9b-e03a2031a247" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.461s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1444.681578] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "fcbe1142-72dc-4a02-af9b-e03a2031a247" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 2.731s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1444.685286] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-90bf174d-eb42-42ab-9ee8-8cf8e428a365 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.697749] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd1f333-1b5b-4a50-82b8-2e4dbe54a7bf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.782289] env[69992]: DEBUG nova.compute.manager [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1444.782289] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1444.783671] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e40d7f30-194a-4005-8fc9-51215a245978 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.788544] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b067350-15e0-43ca-a7c9-c60def5e0173 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.792959] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1444.793554] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-230b655f-8384-44c3-9939-e589009e7fdd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.800634] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b8dea6-4dc7-46b4-979f-ec6058828bea {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.807957] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52ec906a-b1b5-2ce6-41d1-116c63b07e81, 'name': SearchDatastore_Task, 'duration_secs': 0.008364} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.808304] env[69992]: DEBUG oslo_vmware.api [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1444.808304] env[69992]: value = "task-2898143" [ 1444.808304] env[69992]: _type = "Task" [ 1444.808304] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.809438] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c120704d-7185-4776-855c-70c605725520 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.842505] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48605382-76a9-4b01-b2a5-734b7720d6e7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.849591] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Waiting for the task: (returnval){ [ 1444.849591] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52422ab2-a508-63c4-f180-7d1518420a43" [ 1444.849591] env[69992]: _type = "Task" [ 1444.849591] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.849942] env[69992]: DEBUG oslo_vmware.api [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898143, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.857132] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab7b107-cdd1-4421-aa50-7871fa0108a6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.865657] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52422ab2-a508-63c4-f180-7d1518420a43, 'name': SearchDatastore_Task, 'duration_secs': 0.009323} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.866260] env[69992]: DEBUG oslo_concurrency.lockutils [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1444.866557] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 7c8b830a-e89c-4d97-a987-141797aaa55f/7c8b830a-e89c-4d97-a987-141797aaa55f.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1444.866838] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ae018e8f-316d-42a9-88ae-3241ddb4e962 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.876596] env[69992]: DEBUG nova.compute.provider_tree [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1444.883794] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Waiting for the task: (returnval){ [ 1444.883794] env[69992]: value = "task-2898144" [ 1444.883794] env[69992]: _type = "Task" [ 1444.883794] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.892861] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Task: {'id': task-2898144, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.024379] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898142, 'name': Rename_Task, 'duration_secs': 0.141146} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.024708] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1445.025015] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d9b994fd-2938-4e4a-9e02-74c7f7722333 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.032651] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1445.032651] env[69992]: value = "task-2898145" [ 1445.032651] env[69992]: _type = "Task" [ 1445.032651] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.042427] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898145, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.060319] env[69992]: INFO nova.compute.manager [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Took 19.58 seconds to build instance. [ 1445.235153] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "fcbe1142-72dc-4a02-af9b-e03a2031a247" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.553s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1445.320095] env[69992]: DEBUG oslo_vmware.api [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898143, 'name': PowerOffVM_Task, 'duration_secs': 0.241548} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.320234] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1445.320327] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1445.320652] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bcb6e13c-184a-4f5e-bc74-4c7a045da5f3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.380047] env[69992]: DEBUG nova.scheduler.client.report [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1445.387201] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1445.387201] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1445.387954] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Deleting the datastore file [datastore2] 9464339a-b760-47e9-bc75-e88ce18bf71b {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1445.391493] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4675b688-5721-4375-8499-3340add5daec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.398702] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Task: {'id': task-2898144, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.4835} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.400720] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 7c8b830a-e89c-4d97-a987-141797aaa55f/7c8b830a-e89c-4d97-a987-141797aaa55f.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1445.400998] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1445.401426] env[69992]: DEBUG oslo_vmware.api [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1445.401426] env[69992]: value = "task-2898147" [ 1445.401426] env[69992]: _type = "Task" [ 1445.401426] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.401588] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-45f65cfd-5d5b-4655-bb51-083bc663f6fc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.411963] env[69992]: DEBUG oslo_vmware.api [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898147, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.413382] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Waiting for the task: (returnval){ [ 1445.413382] env[69992]: value = "task-2898148" [ 1445.413382] env[69992]: _type = "Task" [ 1445.413382] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.421902] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Task: {'id': task-2898148, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.547296] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898145, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.564866] env[69992]: DEBUG oslo_concurrency.lockutils [None req-63551b7b-4f40-4a94-8778-78f7b8f6e0d4 tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Lock "9d290fe7-12d2-416e-9608-7a8e7e9b2f65" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.088s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1445.565298] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "9d290fe7-12d2-416e-9608-7a8e7e9b2f65" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 3.613s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1445.565576] env[69992]: INFO nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] During sync_power_state the instance has a pending task (spawning). Skip. [ 1445.565725] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "9d290fe7-12d2-416e-9608-7a8e7e9b2f65" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1445.888953] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.743s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1445.892025] env[69992]: DEBUG oslo_concurrency.lockutils [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.513s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1445.892356] env[69992]: DEBUG oslo_concurrency.lockutils [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1445.916733] env[69992]: DEBUG oslo_vmware.api [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898147, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162763} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.916733] env[69992]: INFO nova.scheduler.client.report [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Deleted allocations for instance d50d7460-2b70-45bc-940f-7d45f329fa1c [ 1445.917729] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1445.917729] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1445.917729] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1445.917729] env[69992]: INFO nova.compute.manager [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1445.921068] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1445.924259] env[69992]: INFO nova.scheduler.client.report [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Deleted allocations for instance 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7 [ 1445.926112] env[69992]: DEBUG nova.compute.manager [-] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1445.926112] env[69992]: DEBUG nova.network.neutron [-] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1445.935498] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Task: {'id': task-2898148, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065516} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.935757] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1445.936648] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da4c349-9f72-4f48-a43a-c27ab43d74cf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.958876] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 7c8b830a-e89c-4d97-a987-141797aaa55f/7c8b830a-e89c-4d97-a987-141797aaa55f.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1445.958876] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d7be89b-4133-4c48-81c0-d5cdadcf4501 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.980674] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Waiting for the task: (returnval){ [ 1445.980674] env[69992]: value = "task-2898149" [ 1445.980674] env[69992]: _type = "Task" [ 1445.980674] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.990813] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Task: {'id': task-2898149, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.044113] env[69992]: DEBUG oslo_vmware.api [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898145, 'name': PowerOnVM_Task, 'duration_secs': 0.671782} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.044734] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1446.045011] env[69992]: INFO nova.compute.manager [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Took 8.16 seconds to spawn the instance on the hypervisor. 
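Editor's note: the task-2898142 (Rename_Task) and task-2898145 (PowerOnVM_Task) entries above show the driver's recurring pattern in this section: submit a vCenter task, then block in wait_for_task while _poll_task logs "progress is N%" until the task completes. The snippet below is a minimal, self-contained sketch of that poll loop only; get_task_state() is a hypothetical stand-in for the vSphere TaskInfo lookup and this is not the oslo.vmware implementation.

    import time

    # Hypothetical stand-in for a vSphere TaskInfo lookup; the real driver
    # drives this through the oslo.vmware session, as cited in the log paths.
    def get_task_state(task_ref):
        """Return (state, progress, error) for a task reference."""
        raise NotImplementedError("illustrative only")

    def wait_for_task(task_ref, poll_interval=0.5):
        """Poll a task until it reaches a terminal state, mirroring the
        'progress is N%.' / 'completed successfully.' lines above."""
        while True:
            state, progress, error = get_task_state(task_ref)
            if state == "running":
                print(f"Task {task_ref} progress is {progress}%.")
            elif state == "success":
                print(f"Task {task_ref} completed successfully.")
                return
            elif state == "error":
                raise RuntimeError(f"Task {task_ref} failed: {error}")
            time.sleep(poll_interval)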
[ 1446.045221] env[69992]: DEBUG nova.compute.manager [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1446.046531] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a8c7c4a-e16a-449b-9642-37fdc180b185 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.247258] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquiring lock "9d290fe7-12d2-416e-9608-7a8e7e9b2f65" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1446.247620] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Lock "9d290fe7-12d2-416e-9608-7a8e7e9b2f65" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1446.251022] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquiring lock "9d290fe7-12d2-416e-9608-7a8e7e9b2f65-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1446.251022] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Lock "9d290fe7-12d2-416e-9608-7a8e7e9b2f65-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1446.251022] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Lock "9d290fe7-12d2-416e-9608-7a8e7e9b2f65-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1446.251427] env[69992]: INFO nova.compute.manager [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Terminating instance [ 1446.435413] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6b0536f8-188c-4c52-8b1d-12d52fc2f65a tempest-ServersNegativeTestJSON-1203889377 tempest-ServersNegativeTestJSON-1203889377-project-member] Lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.057s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1446.436622] env[69992]: DEBUG oslo_concurrency.lockutils [None req-07c0f19a-ff91-4af0-a8a3-6c3962309da4 tempest-DeleteServersTestJSON-1168726256 tempest-DeleteServersTestJSON-1168726256-project-member] Lock "d50d7460-2b70-45bc-940f-7d45f329fa1c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.544s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1446.437519] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 4.486s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1446.437812] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "d50d7460-2b70-45bc-940f-7d45f329fa1c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 4.486s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1446.439761] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e0cf3e32-faf1-4e2f-981a-411899bca6d9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.442386] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-af2b7151-98db-499d-80c0-c412ea326c47 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.463631] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44c72dd-5758-43f3-9408-ac627787852d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.475314] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9457e5fe-70f5-41a2-a32f-1f52693e1cab {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.520105] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Task: {'id': task-2898149, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.568917] env[69992]: INFO nova.compute.manager [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Took 17.09 seconds to build instance. 
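Editor's note: the repeated "Acquiring lock ... / acquired ... waited N s / released ... held N s" entries in this section are emitted by oslo.concurrency's lockutils around per-instance and per-resource critical sections. Below is a brief sketch of the usual pattern, assuming hypothetical function bodies; the lockutils.synchronized decorator and lockutils.lock context manager are real oslo.concurrency APIs, everything else is illustrative.

    from oslo_concurrency import lockutils

    # Serialize work on a single instance UUID (UUID taken from the log above;
    # the body is a placeholder, not ComputeManager code).
    @lockutils.synchronized("9d290fe7-12d2-416e-9608-7a8e7e9b2f65")
    def do_terminate_instance():
        # ... power off, unregister, delete datastore files ...
        pass

    def update_usage():
        # Equivalent context-manager form for a shared resource lock,
        # analogous to the "compute_resources" lock held by the resource tracker.
        with lockutils.lock("compute_resources"):
            # ... adjust usage totals ...
            pass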
[ 1446.667719] env[69992]: DEBUG nova.compute.manager [req-3ea01893-0ba7-42b4-867c-c430c483d874 req-5b67e720-aae9-4cff-84eb-731c02eced00 service nova] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Received event network-vif-deleted-1f44518f-713e-4671-bc22-96c67ac28c8e {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1446.668069] env[69992]: INFO nova.compute.manager [req-3ea01893-0ba7-42b4-867c-c430c483d874 req-5b67e720-aae9-4cff-84eb-731c02eced00 service nova] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Neutron deleted interface 1f44518f-713e-4671-bc22-96c67ac28c8e; detaching it from the instance and deleting it from the info cache [ 1446.668358] env[69992]: DEBUG nova.network.neutron [req-3ea01893-0ba7-42b4-867c-c430c483d874 req-5b67e720-aae9-4cff-84eb-731c02eced00 service nova] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1446.761294] env[69992]: DEBUG nova.compute.manager [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1446.761550] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1446.762457] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a03412c-9082-4358-8d37-9779135cb236 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.770657] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1446.770741] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aa9cfd22-da2c-4c9a-bb59-869274959052 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.776521] env[69992]: DEBUG oslo_vmware.api [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for the task: (returnval){ [ 1446.776521] env[69992]: value = "task-2898151" [ 1446.776521] env[69992]: _type = "Task" [ 1446.776521] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.785364] env[69992]: DEBUG oslo_vmware.api [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898151, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.996377] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Task: {'id': task-2898149, 'name': ReconfigVM_Task, 'duration_secs': 0.731936} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.996650] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 7c8b830a-e89c-4d97-a987-141797aaa55f/7c8b830a-e89c-4d97-a987-141797aaa55f.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1446.997296] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ab198da3-228b-4f53-978c-0b650a1e7e80 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.005104] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Waiting for the task: (returnval){ [ 1447.005104] env[69992]: value = "task-2898152" [ 1447.005104] env[69992]: _type = "Task" [ 1447.005104] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.015464] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Task: {'id': task-2898152, 'name': Rename_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.022807] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "d50d7460-2b70-45bc-940f-7d45f329fa1c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.585s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1447.023452] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.586s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1447.071298] env[69992]: DEBUG oslo_concurrency.lockutils [None req-49c6fb87-6964-40f0-9f8f-3c7b6785f2b8 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "5df7d031-66bf-43eb-a05b-07b6cff9db59" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.597s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1447.071633] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "5df7d031-66bf-43eb-a05b-07b6cff9db59" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 5.119s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1447.071832] env[69992]: INFO nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] During sync_power_state the instance has a pending task (spawning). Skip. [ 1447.072055] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "5df7d031-66bf-43eb-a05b-07b6cff9db59" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1447.148473] env[69992]: DEBUG nova.network.neutron [-] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1447.172118] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fc27ee78-6d39-4996-b4a6-1e5518574df1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.187155] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f96ef4-fb88-4972-a3a3-fbcb53bf7b22 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.224439] env[69992]: DEBUG nova.compute.manager [req-3ea01893-0ba7-42b4-867c-c430c483d874 req-5b67e720-aae9-4cff-84eb-731c02eced00 service nova] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Detach interface failed, port_id=1f44518f-713e-4671-bc22-96c67ac28c8e, reason: Instance 9464339a-b760-47e9-bc75-e88ce18bf71b could not be found. 
{{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1447.295038] env[69992]: DEBUG oslo_vmware.api [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898151, 'name': PowerOffVM_Task, 'duration_secs': 0.365781} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.295353] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1447.295611] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1447.295869] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6190d79b-3b84-4fde-b0f8-78ded6807771 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.397413] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1447.397724] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1447.397859] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Deleting the datastore file [datastore1] 9d290fe7-12d2-416e-9608-7a8e7e9b2f65 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1447.398492] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d83ad907-37db-4b19-8172-8e7f258005ba {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.406357] env[69992]: DEBUG oslo_vmware.api [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for the task: (returnval){ [ 1447.406357] env[69992]: value = "task-2898154" [ 1447.406357] env[69992]: _type = "Task" [ 1447.406357] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.419143] env[69992]: DEBUG oslo_vmware.api [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898154, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.516127] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Task: {'id': task-2898152, 'name': Rename_Task, 'duration_secs': 0.176988} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.516417] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1447.516665] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cfc3bf21-3364-4145-b792-5f821f24b401 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.524455] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1447.524455] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1447.524455] env[69992]: INFO nova.compute.manager [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Shelving [ 1447.528051] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Waiting for the task: (returnval){ [ 1447.528051] env[69992]: value = "task-2898155" [ 1447.528051] env[69992]: _type = "Task" [ 1447.528051] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.534148] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Task: {'id': task-2898155, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.651359] env[69992]: INFO nova.compute.manager [-] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Took 1.73 seconds to deallocate network for instance. 
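The PowerOffVM_Task / Rename_Task entries above follow oslo.vmware's invoke-then-poll pattern: the SOAP method returns a task reference, and wait_for_task() blocks while the library logs "progress is N%". A minimal sketch under assumed credentials (host, user, password and the VM reference are hypothetical), not the driver's code:

from oslo_vmware import api as vmware_api

# Constructing the session connects to vCenter, as in the SessionManager.Login
# entries earlier in this log.
session = vmware_api.VMwareAPISession(
    'vcenter.example.org',            # hypothetical vCenter host
    'administrator@vsphere.local',    # hypothetical credentials
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5)

def power_off(vm_ref):
    # vm_ref would be a VirtualMachine managed-object reference found via the
    # SearchIndex/PropertyCollector calls seen above.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)  # raises on task error, returns on success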
[ 1447.921012] env[69992]: DEBUG oslo_vmware.api [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Task: {'id': task-2898154, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.424612} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.921645] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1447.921920] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1447.922733] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1447.922733] env[69992]: INFO nova.compute.manager [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1447.922888] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1447.923809] env[69992]: DEBUG nova.compute.manager [-] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1447.923809] env[69992]: DEBUG nova.network.neutron [-] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1448.046244] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Task: {'id': task-2898155, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.159997] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1448.159997] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1448.159997] env[69992]: DEBUG nova.objects.instance [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lazy-loading 'resources' on Instance uuid 9464339a-b760-47e9-bc75-e88ce18bf71b {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1448.499854] env[69992]: DEBUG nova.compute.manager [req-5dbe9f8f-1750-476b-ad3c-9e7c47b06bcd req-d9f6c139-cd19-40dd-b17e-dc13d71c5c1d service nova] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Received event network-changed-7c7f4aa4-cd49-487f-8637-9ee035bbab41 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1448.503169] env[69992]: DEBUG nova.compute.manager [req-5dbe9f8f-1750-476b-ad3c-9e7c47b06bcd req-d9f6c139-cd19-40dd-b17e-dc13d71c5c1d service nova] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Refreshing instance network info cache due to event network-changed-7c7f4aa4-cd49-487f-8637-9ee035bbab41. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1448.503583] env[69992]: DEBUG oslo_concurrency.lockutils [req-5dbe9f8f-1750-476b-ad3c-9e7c47b06bcd req-d9f6c139-cd19-40dd-b17e-dc13d71c5c1d service nova] Acquiring lock "refresh_cache-5df7d031-66bf-43eb-a05b-07b6cff9db59" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.503672] env[69992]: DEBUG oslo_concurrency.lockutils [req-5dbe9f8f-1750-476b-ad3c-9e7c47b06bcd req-d9f6c139-cd19-40dd-b17e-dc13d71c5c1d service nova] Acquired lock "refresh_cache-5df7d031-66bf-43eb-a05b-07b6cff9db59" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1448.503840] env[69992]: DEBUG nova.network.neutron [req-5dbe9f8f-1750-476b-ad3c-9e7c47b06bcd req-d9f6c139-cd19-40dd-b17e-dc13d71c5c1d service nova] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Refreshing network info cache for port 7c7f4aa4-cd49-487f-8637-9ee035bbab41 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1448.538968] env[69992]: DEBUG oslo_vmware.api [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Task: {'id': task-2898155, 'name': PowerOnVM_Task, 'duration_secs': 0.898854} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.539255] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1448.539461] env[69992]: INFO nova.compute.manager [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Took 5.90 seconds to spawn the instance on the hypervisor. [ 1448.539633] env[69992]: DEBUG nova.compute.manager [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1448.540450] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0a8151-d0df-4ff6-87cd-bf29839157ec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.544421] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1448.547020] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd667c49-8271-48a8-8c67-fb5f86bf2f82 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.555586] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1448.555586] env[69992]: value = "task-2898156" [ 1448.555586] env[69992]: _type = "Task" [ 1448.555586] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.563732] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898156, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.718448] env[69992]: DEBUG nova.compute.manager [req-a18b06f8-192b-4e20-b20a-e63815ba5753 req-3830120b-4050-4048-b644-5868abf8c143 service nova] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Received event network-vif-deleted-3b4a3277-a4e1-4872-87b4-f4fcaadff6bc {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1448.718642] env[69992]: INFO nova.compute.manager [req-a18b06f8-192b-4e20-b20a-e63815ba5753 req-3830120b-4050-4048-b644-5868abf8c143 service nova] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Neutron deleted interface 3b4a3277-a4e1-4872-87b4-f4fcaadff6bc; detaching it from the instance and deleting it from the info cache [ 1448.718896] env[69992]: DEBUG nova.network.neutron [req-a18b06f8-192b-4e20-b20a-e63815ba5753 req-3830120b-4050-4048-b644-5868abf8c143 service nova] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Updating instance_info_cache with network_info: [{"id": "5e4cacd9-f025-4a13-8d5b-615b3c9e12a3", "address": "fa:16:3e:c3:fe:08", "network": {"id": "4a5b0621-bdd9-45b6-90b7-f6389406ef1e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-414485365", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.245", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d5c32fe8b254c5abdd4123bd2088353", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e4cacd9-f0", "ovs_interfaceid": "5e4cacd9-f025-4a13-8d5b-615b3c9e12a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.804235] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3252e0-be80-457b-b88a-ebcffa9c2202 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.810935] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f716ee97-e27d-4f5c-a449-9c1fe6a37ef5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.844555] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea28b51-87d2-447c-ad1c-6b78f71549ee {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.852633] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751753d4-89a4-4280-86ea-87cbe55688c7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.868154] env[69992]: DEBUG nova.compute.provider_tree [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 
tempest-ServerActionsTestOtherA-414793530-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1449.065283] env[69992]: INFO nova.compute.manager [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Took 14.12 seconds to build instance. [ 1449.070038] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898156, 'name': PowerOffVM_Task, 'duration_secs': 0.213301} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.070288] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1449.071099] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-309caced-7506-4ca5-a12e-427daa6e9f97 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.093667] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f205eba3-bbcf-4c3b-906f-cd431aef8496 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.221902] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d0961e12-e656-4439-b9ab-b2326d2ea2dd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.233625] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04b8676-e2ee-4d8c-94b6-a9a5a4d6a647 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.251260] env[69992]: DEBUG nova.network.neutron [-] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1449.261858] env[69992]: DEBUG nova.compute.manager [req-a18b06f8-192b-4e20-b20a-e63815ba5753 req-3830120b-4050-4048-b644-5868abf8c143 service nova] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Detach interface failed, port_id=3b4a3277-a4e1-4872-87b4-f4fcaadff6bc, reason: Instance 9d290fe7-12d2-416e-9608-7a8e7e9b2f65 could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1449.328211] env[69992]: DEBUG nova.network.neutron [req-5dbe9f8f-1750-476b-ad3c-9e7c47b06bcd req-d9f6c139-cd19-40dd-b17e-dc13d71c5c1d service nova] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Updated VIF entry in instance network info cache for port 7c7f4aa4-cd49-487f-8637-9ee035bbab41. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1449.328607] env[69992]: DEBUG nova.network.neutron [req-5dbe9f8f-1750-476b-ad3c-9e7c47b06bcd req-d9f6c139-cd19-40dd-b17e-dc13d71c5c1d service nova] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Updating instance_info_cache with network_info: [{"id": "7c7f4aa4-cd49-487f-8637-9ee035bbab41", "address": "fa:16:3e:6a:b0:aa", "network": {"id": "7b76ab15-e15a-4e22-ba38-bffeba7c4ff6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1461551189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4da04b8933ad4d2ba4b1c193853f31b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c7f4aa4-cd", "ovs_interfaceid": "7c7f4aa4-cd49-487f-8637-9ee035bbab41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1449.371534] env[69992]: DEBUG nova.scheduler.client.report [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1449.567495] env[69992]: DEBUG oslo_concurrency.lockutils [None req-504aedff-9de9-48dc-a668-b417307ec78f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Lock "7c8b830a-e89c-4d97-a987-141797aaa55f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.626s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1449.567495] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "7c8b830a-e89c-4d97-a987-141797aaa55f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 7.615s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1449.567676] env[69992]: INFO nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] During sync_power_state the instance has a pending task (networking). 
Skip. [ 1449.567846] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "7c8b830a-e89c-4d97-a987-141797aaa55f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1449.606666] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Creating Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1449.607013] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a010677b-5f57-4597-a629-8d3edf98366e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.614883] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1449.614883] env[69992]: value = "task-2898157" [ 1449.614883] env[69992]: _type = "Task" [ 1449.614883] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.623569] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898157, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.764169] env[69992]: INFO nova.compute.manager [-] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Took 1.84 seconds to deallocate network for instance. 
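The inventory dict reported to placement above can be reduced to the allocatable capacity the scheduler works against, roughly (total - reserved) * allocation_ratio. A small self-contained sketch using the exact figures from this log:

# Effective capacity per resource class from the reported inventory.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} allocatable")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400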
[ 1449.796157] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Acquiring lock "d2b4482f-cc98-4e3d-9996-397f4f0b2ead" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1449.796398] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Lock "d2b4482f-cc98-4e3d-9996-397f4f0b2ead" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1449.811702] env[69992]: DEBUG nova.compute.manager [None req-37cb5002-bc6e-414b-a3d3-38b24885bb05 tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1449.812619] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57fac0b5-5a73-430d-8aba-9847b20cd0c8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.832194] env[69992]: DEBUG oslo_concurrency.lockutils [req-5dbe9f8f-1750-476b-ad3c-9e7c47b06bcd req-d9f6c139-cd19-40dd-b17e-dc13d71c5c1d service nova] Releasing lock "refresh_cache-5df7d031-66bf-43eb-a05b-07b6cff9db59" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1449.881144] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.721s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1449.908034] env[69992]: INFO nova.scheduler.client.report [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Deleted allocations for instance 9464339a-b760-47e9-bc75-e88ce18bf71b [ 1449.926546] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Acquiring lock "7c8b830a-e89c-4d97-a987-141797aaa55f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1449.926904] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Lock "7c8b830a-e89c-4d97-a987-141797aaa55f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
1449.927051] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Acquiring lock "7c8b830a-e89c-4d97-a987-141797aaa55f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1449.927186] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Lock "7c8b830a-e89c-4d97-a987-141797aaa55f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1449.927354] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Lock "7c8b830a-e89c-4d97-a987-141797aaa55f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1449.930375] env[69992]: INFO nova.compute.manager [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Terminating instance [ 1450.129893] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898157, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.274538] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1450.274538] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1450.274538] env[69992]: DEBUG nova.objects.instance [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Lazy-loading 'resources' on Instance uuid 9d290fe7-12d2-416e-9608-7a8e7e9b2f65 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1450.299245] env[69992]: DEBUG nova.compute.manager [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1450.323573] env[69992]: INFO nova.compute.manager [None req-37cb5002-bc6e-414b-a3d3-38b24885bb05 tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] instance snapshotting [ 1450.324512] env[69992]: DEBUG nova.objects.instance [None req-37cb5002-bc6e-414b-a3d3-38b24885bb05 tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Lazy-loading 'flavor' on Instance uuid 7c8b830a-e89c-4d97-a987-141797aaa55f {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1450.417744] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4d921edf-462e-49da-a4d3-03be16925792 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "9464339a-b760-47e9-bc75-e88ce18bf71b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.145s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1450.435812] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Acquiring lock "refresh_cache-7c8b830a-e89c-4d97-a987-141797aaa55f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.435812] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Acquired lock "refresh_cache-7c8b830a-e89c-4d97-a987-141797aaa55f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1450.435812] env[69992]: DEBUG nova.network.neutron [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1450.626201] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898157, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.746612] env[69992]: DEBUG nova.compute.manager [req-0af8664e-ab94-4228-9095-f62fc662595b req-72da31e4-6bbc-46b1-9be7-6f59a4e1fa8c service nova] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Received event network-vif-deleted-5e4cacd9-f025-4a13-8d5b-615b3c9e12a3 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1450.826331] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1450.834333] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a39cd2-ee98-4266-b4b4-951cc166f78c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.853952] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf4f9909-58d3-4832-b75f-45e7c249bf0c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.899111] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89abf510-297e-4376-a3fe-ed6ef13d4ee7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.911539] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f54aeb-b231-4fab-95fd-df3c25f80a48 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.946205] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf722a6-0c2d-403c-a795-784286c95cb1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.953828] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5338bc40-309e-45bf-b07b-aae5c423a346 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.967276] env[69992]: DEBUG nova.compute.provider_tree [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1450.969203] env[69992]: DEBUG nova.network.neutron [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1451.016838] env[69992]: DEBUG nova.network.neutron [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.127357] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898157, 'name': CreateSnapshot_Task, 'duration_secs': 1.069675} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.127693] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Created Snapshot of the VM instance {{(pid=69992) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1451.129574] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce6e3aa6-3a77-45ec-99b2-b7e42c9ca333 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.371226] env[69992]: DEBUG nova.compute.manager [None req-37cb5002-bc6e-414b-a3d3-38b24885bb05 tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Instance disappeared during snapshot {{(pid=69992) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 1451.472430] env[69992]: DEBUG nova.scheduler.client.report [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1451.487021] env[69992]: DEBUG nova.compute.manager [None req-37cb5002-bc6e-414b-a3d3-38b24885bb05 tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Found 0 images (rotation: 2) {{(pid=69992) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1451.520207] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Releasing lock "refresh_cache-7c8b830a-e89c-4d97-a987-141797aaa55f" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1451.520691] env[69992]: DEBUG nova.compute.manager [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 
tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1451.520954] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1451.521845] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f9a4fe-4899-483a-b4cc-644611333ca8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.529935] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1451.530215] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ee129a4-7683-4b88-bf51-706fac95327e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.536765] env[69992]: DEBUG oslo_vmware.api [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Waiting for the task: (returnval){ [ 1451.536765] env[69992]: value = "task-2898158" [ 1451.536765] env[69992]: _type = "Task" [ 1451.536765] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.546437] env[69992]: DEBUG oslo_vmware.api [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Task: {'id': task-2898158, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.650381] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Creating linked-clone VM from snapshot {{(pid=69992) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1451.650381] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a796dc4a-4903-47a3-a5b7-16da8a45bb5b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.657968] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1451.657968] env[69992]: value = "task-2898159" [ 1451.657968] env[69992]: _type = "Task" [ 1451.657968] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.666760] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898159, 'name': CloneVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.977569] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.705s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1451.980492] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.154s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1451.982532] env[69992]: INFO nova.compute.claims [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1451.999262] env[69992]: INFO nova.scheduler.client.report [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Deleted allocations for instance 9d290fe7-12d2-416e-9608-7a8e7e9b2f65 [ 1452.048869] env[69992]: DEBUG oslo_vmware.api [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Task: {'id': task-2898158, 'name': PowerOffVM_Task, 'duration_secs': 0.213586} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.049545] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1452.049755] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1452.050737] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d214de2-36fc-4be9-9efa-1bdd26ed18c4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.083815] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1452.084149] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1452.084474] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Deleting the datastore file [datastore1] 7c8b830a-e89c-4d97-a987-141797aaa55f {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1452.085073] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d48f4bc9-1c2f-4e1d-9b7a-2163737809dd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.092430] env[69992]: DEBUG oslo_vmware.api [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Waiting for the task: (returnval){ [ 1452.092430] env[69992]: value = "task-2898161" [ 1452.092430] env[69992]: _type = "Task" [ 1452.092430] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.101281] env[69992]: DEBUG oslo_vmware.api [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Task: {'id': task-2898161, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.138681] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1452.138973] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1452.168901] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898159, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.323735] env[69992]: DEBUG oslo_concurrency.lockutils [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1452.323982] env[69992]: DEBUG oslo_concurrency.lockutils [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1452.505779] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a7c61b78-376f-4ed2-9c10-783656f5ca0c tempest-ServersTestMultiNic-418732069 tempest-ServersTestMultiNic-418732069-project-member] Lock "9d290fe7-12d2-416e-9608-7a8e7e9b2f65" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.258s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1452.603686] env[69992]: DEBUG oslo_vmware.api [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Task: {'id': task-2898161, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136162} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.603856] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1452.604252] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1452.604364] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1452.604442] env[69992]: INFO nova.compute.manager [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1452.604708] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1452.604916] env[69992]: DEBUG nova.compute.manager [-] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1452.605018] env[69992]: DEBUG nova.network.neutron [-] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1452.609305] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1452.670826] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898159, 'name': CloneVM_Task} progress is 94%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.826490] env[69992]: DEBUG nova.compute.manager [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1452.842201] env[69992]: DEBUG nova.network.neutron [-] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1453.078164] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca3d328-2d1e-4a26-ae13-48ed6be0d405 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.088172] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a9ae75c-349d-4e23-b7e3-ddc350fc8e15 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.121219] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b4ab0fc-7d75-49b5-ba7d-9c21d3442583 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.129911] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc18995-5d4e-4639-bada-5fc884ecadaf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.144607] env[69992]: DEBUG nova.compute.provider_tree [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1453.169587] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898159, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.345187] env[69992]: DEBUG oslo_concurrency.lockutils [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1453.345503] env[69992]: DEBUG nova.network.neutron [-] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1453.609717] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1453.610115] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1453.610167] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1453.648406] env[69992]: DEBUG nova.scheduler.client.report [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1453.669991] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898159, 'name': CloneVM_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.848257] env[69992]: INFO nova.compute.manager [-] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Took 1.24 seconds to deallocate network for instance. 
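[editor's note] The entries around task-2898158 and task-2898161 above trace the VMware destroy path for instance 7c8b830a-e89c-4d97-a987-141797aaa55f: power off the VM, unregister it, then delete its datastore directory, with each asynchronous step polled through oslo.vmware's task helpers (the repeated "progress is 0%" / "completed successfully" lines). The sketch below illustrates that call pattern with oslo.vmware's public session API; it is not Nova's actual implementation (which lives in nova.virt.vmwareapi.vm_util and vmops), and the helper names, argument values, and session settings are illustrative assumptions. Only the vSphere method names (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) and the invoke_api/wait_for_task pattern are taken from the log itself.

```python
# Minimal sketch (not Nova's code) of the destroy sequence logged above,
# written against oslo.vmware's public session API. Helper names and
# connection settings are placeholders for illustration only.
from oslo_vmware import api as vmware_api


def make_session(host, user, password):
    # Argument names follow oslo_vmware.api.VMwareAPISession; the values
    # here are placeholders, not this deployment's real settings.
    return vmware_api.VMwareAPISession(
        host, user, password, api_retry_count=10, task_poll_interval=0.5)


def destroy_vm(session, vm_ref, instance_dir_path, datacenter_ref):
    # PowerOffVM_Task returns a vSphere task; wait_for_task polls it until
    # completion, which is what produces the "progress is 0%" /
    # "completed successfully" lines in the log.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # UnregisterVM is synchronous, so no task polling follows it.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Deleting the instance's datastore directory goes through the
    # FileManager managed object, matching the DeleteDatastoreFile_Task
    # invocation recorded for "[datastore1] 7c8b830a-...".
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name=instance_dir_path, datacenter=datacenter_ref)
    session.wait_for_task(task)
```

The same pattern recurs elsewhere in this section for CloneVM_Task and CreateVM_Task: only the invoked method and its arguments change, while polling is always delegated to wait_for_task, which accounts for the uniform "Waiting for the task: ... to complete" entries.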
[ 1454.113573] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1454.155110] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.175s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1454.155638] env[69992]: DEBUG nova.compute.manager [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1454.158244] env[69992]: DEBUG oslo_concurrency.lockutils [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.813s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1454.160251] env[69992]: INFO nova.compute.claims [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1454.172243] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898159, 'name': CloneVM_Task, 'duration_secs': 2.082739} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.172515] env[69992]: INFO nova.virt.vmwareapi.vmops [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Created linked-clone VM from snapshot [ 1454.173273] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9911a26-4796-4527-bcfb-dbe1056883fc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.181403] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Uploading image 3837388a-a31b-4d6b-97e6-ea9f24ecc066 {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1454.210460] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1454.210460] env[69992]: value = "vm-582137" [ 1454.210460] env[69992]: _type = "VirtualMachine" [ 1454.210460] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1454.211976] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-53189069-0f25-4f3a-a9b7-34ffd7b469c4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.220359] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lease: (returnval){ [ 1454.220359] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]527f5a8c-b4f4-0b5f-33af-7ee2a2e75c14" [ 1454.220359] env[69992]: _type = "HttpNfcLease" [ 1454.220359] env[69992]: } obtained for exporting VM: (result){ [ 1454.220359] env[69992]: value = "vm-582137" [ 1454.220359] env[69992]: _type = "VirtualMachine" [ 1454.220359] env[69992]: }. {{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1454.220958] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the lease: (returnval){ [ 1454.220958] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]527f5a8c-b4f4-0b5f-33af-7ee2a2e75c14" [ 1454.220958] env[69992]: _type = "HttpNfcLease" [ 1454.220958] env[69992]: } to be ready. {{(pid=69992) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1454.228997] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1454.228997] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]527f5a8c-b4f4-0b5f-33af-7ee2a2e75c14" [ 1454.228997] env[69992]: _type = "HttpNfcLease" [ 1454.228997] env[69992]: } is initializing. 
{{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1454.355805] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1454.663886] env[69992]: DEBUG nova.compute.utils [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1454.669176] env[69992]: DEBUG nova.compute.manager [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1454.669176] env[69992]: DEBUG nova.network.neutron [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1454.729686] env[69992]: DEBUG nova.policy [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17e2aa2d004c4b278f490b7749b4e610', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7ec2752ef25a42378aaaa39036f083de', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1454.733355] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1454.733355] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]527f5a8c-b4f4-0b5f-33af-7ee2a2e75c14" [ 1454.733355] env[69992]: _type = "HttpNfcLease" [ 1454.733355] env[69992]: } is ready. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1454.733715] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1454.733715] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]527f5a8c-b4f4-0b5f-33af-7ee2a2e75c14" [ 1454.733715] env[69992]: _type = "HttpNfcLease" [ 1454.733715] env[69992]: }. 
{{(pid=69992) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1454.734491] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7139718c-5524-4af6-bc21-b53368ad5f37 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.742909] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fcc2b6-1a3a-e680-35f1-54cfd60ace72/disk-0.vmdk from lease info. {{(pid=69992) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1454.743141] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fcc2b6-1a3a-e680-35f1-54cfd60ace72/disk-0.vmdk for reading. {{(pid=69992) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1454.909302] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e46c03dd-a603-4129-8a05-4dc164d5c3c4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.141311] env[69992]: DEBUG nova.network.neutron [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Successfully created port: 1ffe5905-a465-45ef-9e79-c955f95cc370 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1455.169142] env[69992]: DEBUG nova.compute.manager [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1455.291560] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e278582-02e4-4af2-b4db-c23362056031 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.300570] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb211544-1ca8-41be-b3db-327e67044c16 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.341707] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b8f10f8-7a3b-4b5e-b1ba-b83f7f8a6d96 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.351135] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae65e13c-7e42-42e5-beaa-7485a7e77f17 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.367037] env[69992]: DEBUG nova.compute.provider_tree [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1455.870411] env[69992]: DEBUG nova.scheduler.client.report [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1456.180845] env[69992]: DEBUG nova.compute.manager [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1456.203677] env[69992]: DEBUG nova.virt.hardware [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1456.204153] env[69992]: DEBUG nova.virt.hardware [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1456.204398] env[69992]: DEBUG nova.virt.hardware [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1456.204705] env[69992]: DEBUG nova.virt.hardware [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1456.205041] env[69992]: DEBUG nova.virt.hardware [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1456.205333] env[69992]: DEBUG nova.virt.hardware [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1456.205677] env[69992]: DEBUG nova.virt.hardware [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1456.205912] env[69992]: DEBUG nova.virt.hardware [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1456.206157] env[69992]: DEBUG nova.virt.hardware [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1456.206350] env[69992]: DEBUG nova.virt.hardware [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1456.206546] env[69992]: DEBUG nova.virt.hardware [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1456.207732] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b08f46-da56-43c9-b4b6-7249c7113650 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.218063] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f5ac3fb-9139-42a6-9a24-ac1bda60cd26 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.376605] env[69992]: DEBUG oslo_concurrency.lockutils [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.218s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1456.377215] env[69992]: DEBUG nova.compute.manager [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1456.379995] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.267s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1456.380185] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1456.380341] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69992) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1456.380628] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.025s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1456.380914] env[69992]: DEBUG nova.objects.instance [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Lazy-loading 'resources' on Instance uuid 7c8b830a-e89c-4d97-a987-141797aaa55f {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1456.382718] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f33edbf7-1d24-41ba-941d-51987050f823 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.393911] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b25426-c100-454c-b451-7103b7c5cc56 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.409966] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7cc320-90a2-4ed6-82a2-d68e1e3aa3ac {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.419366] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6dcf589-44cf-4b32-8e4c-b39fa22e5e49 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.454938] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180131MB free_disk=161GB free_vcpus=48 pci_devices=None {{(pid=69992) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1456.455111] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1456.608717] env[69992]: DEBUG nova.compute.manager [req-e552d917-f947-40d7-951b-a912e0c397d2 req-85ed2fb5-feef-43a3-94d4-be7bf26ab111 service nova] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Received event network-vif-plugged-1ffe5905-a465-45ef-9e79-c955f95cc370 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1456.609018] env[69992]: DEBUG oslo_concurrency.lockutils [req-e552d917-f947-40d7-951b-a912e0c397d2 req-85ed2fb5-feef-43a3-94d4-be7bf26ab111 service nova] Acquiring lock "d2b4482f-cc98-4e3d-9996-397f4f0b2ead-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1456.609295] env[69992]: DEBUG oslo_concurrency.lockutils [req-e552d917-f947-40d7-951b-a912e0c397d2 req-85ed2fb5-feef-43a3-94d4-be7bf26ab111 service nova] Lock "d2b4482f-cc98-4e3d-9996-397f4f0b2ead-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1456.609477] env[69992]: DEBUG oslo_concurrency.lockutils [req-e552d917-f947-40d7-951b-a912e0c397d2 req-85ed2fb5-feef-43a3-94d4-be7bf26ab111 service nova] Lock "d2b4482f-cc98-4e3d-9996-397f4f0b2ead-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1456.609706] env[69992]: DEBUG nova.compute.manager [req-e552d917-f947-40d7-951b-a912e0c397d2 req-85ed2fb5-feef-43a3-94d4-be7bf26ab111 service nova] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] No waiting events found dispatching network-vif-plugged-1ffe5905-a465-45ef-9e79-c955f95cc370 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1456.609885] env[69992]: WARNING nova.compute.manager [req-e552d917-f947-40d7-951b-a912e0c397d2 req-85ed2fb5-feef-43a3-94d4-be7bf26ab111 service nova] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Received unexpected event network-vif-plugged-1ffe5905-a465-45ef-9e79-c955f95cc370 for instance with vm_state building and task_state spawning. [ 1456.705655] env[69992]: DEBUG nova.network.neutron [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Successfully updated port: 1ffe5905-a465-45ef-9e79-c955f95cc370 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1456.886023] env[69992]: DEBUG nova.compute.utils [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1456.886023] env[69992]: DEBUG nova.compute.manager [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1456.887051] env[69992]: DEBUG nova.network.neutron [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1456.953846] env[69992]: DEBUG nova.policy [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cdc7f71c9c4b4d40bf40b631c24b5ee6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '17ab89c6cf054418a4dd1a0e61b3a5e8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1457.041217] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd5049e3-fd59-436e-bc59-fa598991d3d1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.051552] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd39ed5-2d67-4c9d-95c5-a54c526eacb1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.103965] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55007592-b92d-409c-b762-510e7fd3cb34 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.113148] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041247b6-13d3-47be-b2e3-1992b1299ec4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.136726] env[69992]: DEBUG nova.compute.provider_tree [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1457.209503] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Acquiring lock "refresh_cache-d2b4482f-cc98-4e3d-9996-397f4f0b2ead" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1457.209613] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Acquired lock "refresh_cache-d2b4482f-cc98-4e3d-9996-397f4f0b2ead" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1457.209798] env[69992]: DEBUG nova.network.neutron [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a 
tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1457.291987] env[69992]: DEBUG nova.network.neutron [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Successfully created port: b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1457.393259] env[69992]: DEBUG nova.compute.manager [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1457.644846] env[69992]: DEBUG nova.scheduler.client.report [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1457.881042] env[69992]: DEBUG nova.network.neutron [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1458.082368] env[69992]: DEBUG nova.network.neutron [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Updating instance_info_cache with network_info: [{"id": "1ffe5905-a465-45ef-9e79-c955f95cc370", "address": "fa:16:3e:41:a3:36", "network": {"id": "e10f0120-1dcd-4766-abc3-321951974732", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-37569696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7ec2752ef25a42378aaaa39036f083de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e839c46-1ae9-43b7-9518-8f18f48100dd", "external-id": "nsx-vlan-transportzone-666", "segmentation_id": 666, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ffe5905-a4", "ovs_interfaceid": "1ffe5905-a465-45ef-9e79-c955f95cc370", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1458.149460] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.769s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1458.154018] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 1.697s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1458.190196] env[69992]: INFO nova.scheduler.client.report [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Deleted allocations for instance 7c8b830a-e89c-4d97-a987-141797aaa55f [ 1458.407223] env[69992]: DEBUG nova.compute.manager [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1458.435168] env[69992]: DEBUG nova.virt.hardware [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1458.435442] env[69992]: DEBUG nova.virt.hardware [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1458.435602] env[69992]: DEBUG nova.virt.hardware [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1458.435785] env[69992]: DEBUG nova.virt.hardware [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1458.435932] env[69992]: DEBUG nova.virt.hardware [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1458.436122] env[69992]: DEBUG nova.virt.hardware [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1458.436351] env[69992]: DEBUG nova.virt.hardware [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1458.436512] env[69992]: DEBUG nova.virt.hardware [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1458.436682] env[69992]: DEBUG 
nova.virt.hardware [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1458.436847] env[69992]: DEBUG nova.virt.hardware [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1458.437064] env[69992]: DEBUG nova.virt.hardware [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1458.438206] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a777e75f-b6f0-4b4f-967e-8d7cd48044f7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.449493] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab595fd8-cd98-4537-8b07-ddf28337ca49 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.589023] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Releasing lock "refresh_cache-d2b4482f-cc98-4e3d-9996-397f4f0b2ead" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1458.589023] env[69992]: DEBUG nova.compute.manager [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Instance network_info: |[{"id": "1ffe5905-a465-45ef-9e79-c955f95cc370", "address": "fa:16:3e:41:a3:36", "network": {"id": "e10f0120-1dcd-4766-abc3-321951974732", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-37569696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7ec2752ef25a42378aaaa39036f083de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e839c46-1ae9-43b7-9518-8f18f48100dd", "external-id": "nsx-vlan-transportzone-666", "segmentation_id": 666, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ffe5905-a4", "ovs_interfaceid": "1ffe5905-a465-45ef-9e79-c955f95cc370", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1458.589023] env[69992]: DEBUG 
nova.virt.vmwareapi.vmops [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:a3:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e839c46-1ae9-43b7-9518-8f18f48100dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1ffe5905-a465-45ef-9e79-c955f95cc370', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1458.595133] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Creating folder: Project (7ec2752ef25a42378aaaa39036f083de). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1458.595577] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca147283-7980-457e-8ea3-e925f6585805 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.612401] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Created folder: Project (7ec2752ef25a42378aaaa39036f083de) in parent group-v581821. [ 1458.612592] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Creating folder: Instances. Parent ref: group-v582138. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1458.612842] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-661e1562-992d-4d80-b6c1-2da44561c35e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.624387] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Created folder: Instances in parent group-v582138. [ 1458.624574] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1458.625859] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1458.625859] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb643a4c-fa5a-47e3-bac8-e885c44e9710 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.649054] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1458.649054] env[69992]: value = "task-2898165" [ 1458.649054] env[69992]: _type = "Task" [ 1458.649054] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.665191] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898165, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.699971] env[69992]: DEBUG oslo_concurrency.lockutils [None req-8b76d474-aa2b-4dcd-9a9b-a816d9eeff3f tempest-ServersAaction247Test-2133763590 tempest-ServersAaction247Test-2133763590-project-member] Lock "7c8b830a-e89c-4d97-a987-141797aaa55f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.773s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1458.703552] env[69992]: DEBUG nova.compute.manager [req-4173f051-574f-4072-93e4-666d4b222078 req-15399c2e-15e5-4f29-8c48-309f753d6c82 service nova] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Received event network-changed-1ffe5905-a465-45ef-9e79-c955f95cc370 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1458.703952] env[69992]: DEBUG nova.compute.manager [req-4173f051-574f-4072-93e4-666d4b222078 req-15399c2e-15e5-4f29-8c48-309f753d6c82 service nova] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Refreshing instance network info cache due to event network-changed-1ffe5905-a465-45ef-9e79-c955f95cc370. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1458.704273] env[69992]: DEBUG oslo_concurrency.lockutils [req-4173f051-574f-4072-93e4-666d4b222078 req-15399c2e-15e5-4f29-8c48-309f753d6c82 service nova] Acquiring lock "refresh_cache-d2b4482f-cc98-4e3d-9996-397f4f0b2ead" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1458.704593] env[69992]: DEBUG oslo_concurrency.lockutils [req-4173f051-574f-4072-93e4-666d4b222078 req-15399c2e-15e5-4f29-8c48-309f753d6c82 service nova] Acquired lock "refresh_cache-d2b4482f-cc98-4e3d-9996-397f4f0b2ead" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1458.705014] env[69992]: DEBUG nova.network.neutron [req-4173f051-574f-4072-93e4-666d4b222078 req-15399c2e-15e5-4f29-8c48-309f753d6c82 service nova] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Refreshing network info cache for port 1ffe5905-a465-45ef-9e79-c955f95cc370 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1459.094233] env[69992]: DEBUG nova.network.neutron [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Successfully updated port: b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1459.160453] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898165, 'name': CreateVM_Task, 'duration_secs': 0.44541} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.161118] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1459.172684] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.172876] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1459.173236] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1459.173513] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b896e601-f183-4ae8-bbc6-4205d677ce12 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.181924] env[69992]: DEBUG 
oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Waiting for the task: (returnval){ [ 1459.181924] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52d792ba-dcd1-ad4c-1bcb-4c3bf86155fb" [ 1459.181924] env[69992]: _type = "Task" [ 1459.181924] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.185064] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1459.185241] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance b7af455d-a3a7-480f-b778-9eb3724fa6f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1459.185382] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 5df7d031-66bf-43eb-a05b-07b6cff9db59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1459.185532] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance d2b4482f-cc98-4e3d-9996-397f4f0b2ead actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1459.185650] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1459.185878] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1459.185974] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1459.192263] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d792ba-dcd1-ad4c-1bcb-4c3bf86155fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.259591] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aafeb85-ad8b-4c8f-82f5-eb1fb0a40c8e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.268445] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d419a759-46c1-4e1d-bf21-79df137f7bf1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.310160] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0b0c6d-221a-4819-97e3-e42519b52876 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.320030] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ec5519-3300-4517-a124-7e86f17663df {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.338044] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1459.507871] env[69992]: DEBUG nova.network.neutron [req-4173f051-574f-4072-93e4-666d4b222078 req-15399c2e-15e5-4f29-8c48-309f753d6c82 service nova] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Updated VIF entry in instance network info cache for port 1ffe5905-a465-45ef-9e79-c955f95cc370. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1459.508262] env[69992]: DEBUG nova.network.neutron [req-4173f051-574f-4072-93e4-666d4b222078 req-15399c2e-15e5-4f29-8c48-309f753d6c82 service nova] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Updating instance_info_cache with network_info: [{"id": "1ffe5905-a465-45ef-9e79-c955f95cc370", "address": "fa:16:3e:41:a3:36", "network": {"id": "e10f0120-1dcd-4766-abc3-321951974732", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-37569696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7ec2752ef25a42378aaaa39036f083de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e839c46-1ae9-43b7-9518-8f18f48100dd", "external-id": "nsx-vlan-transportzone-666", "segmentation_id": 666, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ffe5905-a4", "ovs_interfaceid": "1ffe5905-a465-45ef-9e79-c955f95cc370", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1459.598121] env[69992]: DEBUG oslo_concurrency.lockutils [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "refresh_cache-57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.598121] env[69992]: DEBUG oslo_concurrency.lockutils [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired lock "refresh_cache-57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1459.598121] env[69992]: DEBUG nova.network.neutron [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1459.691199] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52d792ba-dcd1-ad4c-1bcb-4c3bf86155fb, 'name': SearchDatastore_Task, 'duration_secs': 0.012795} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.691520] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1459.691758] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1459.691999] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.692163] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1459.692344] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1459.692619] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6cb82beb-63f9-45cf-a502-0fa78e9fc92a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.703246] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1459.703625] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1459.704359] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-840f0235-875d-417e-b97c-1ac6991576ee {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.710957] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Waiting for the task: (returnval){ [ 1459.710957] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52150253-3a1f-59d1-0be8-32da0956f10c" [ 1459.710957] env[69992]: _type = "Task" [ 1459.710957] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.720457] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52150253-3a1f-59d1-0be8-32da0956f10c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.725735] env[69992]: DEBUG oslo_concurrency.lockutils [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "90facf1a-ae81-4259-bf75-94779267699c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1459.725883] env[69992]: DEBUG oslo_concurrency.lockutils [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "90facf1a-ae81-4259-bf75-94779267699c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1459.844261] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1460.011776] env[69992]: DEBUG oslo_concurrency.lockutils [req-4173f051-574f-4072-93e4-666d4b222078 req-15399c2e-15e5-4f29-8c48-309f753d6c82 service nova] Releasing lock "refresh_cache-d2b4482f-cc98-4e3d-9996-397f4f0b2ead" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1460.146979] env[69992]: DEBUG nova.network.neutron [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 
57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1460.223975] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52150253-3a1f-59d1-0be8-32da0956f10c, 'name': SearchDatastore_Task, 'duration_secs': 0.017676} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.224810] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3071e2ba-5f3a-4554-adc7-af0afcbeb6b5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.228775] env[69992]: DEBUG nova.compute.manager [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1460.233332] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Waiting for the task: (returnval){ [ 1460.233332] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]527dd261-2a89-d620-8ddf-bec0a6d9d48b" [ 1460.233332] env[69992]: _type = "Task" [ 1460.233332] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.245652] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]527dd261-2a89-d620-8ddf-bec0a6d9d48b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.349354] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69992) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1460.349582] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.198s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1460.350678] env[69992]: DEBUG nova.network.neutron [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Updating instance_info_cache with network_info: [{"id": "b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8", "address": "fa:16:3e:f4:e6:7b", "network": {"id": "107a28a2-7647-4953-abac-1246b892511f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1382825212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ab89c6cf054418a4dd1a0e61b3a5e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb710419b-6f", "ovs_interfaceid": "b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1460.743140] env[69992]: DEBUG nova.compute.manager [req-19c735c8-95d1-436e-855c-c1c200dc30b2 req-8397f196-902e-4d4a-8c0e-8ea28ac00092 service nova] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Received event network-vif-plugged-b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1460.743364] env[69992]: DEBUG oslo_concurrency.lockutils [req-19c735c8-95d1-436e-855c-c1c200dc30b2 req-8397f196-902e-4d4a-8c0e-8ea28ac00092 service nova] Acquiring lock "57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1460.743569] env[69992]: DEBUG oslo_concurrency.lockutils [req-19c735c8-95d1-436e-855c-c1c200dc30b2 req-8397f196-902e-4d4a-8c0e-8ea28ac00092 service nova] Lock "57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1460.743738] env[69992]: DEBUG 
oslo_concurrency.lockutils [req-19c735c8-95d1-436e-855c-c1c200dc30b2 req-8397f196-902e-4d4a-8c0e-8ea28ac00092 service nova] Lock "57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1460.743934] env[69992]: DEBUG nova.compute.manager [req-19c735c8-95d1-436e-855c-c1c200dc30b2 req-8397f196-902e-4d4a-8c0e-8ea28ac00092 service nova] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] No waiting events found dispatching network-vif-plugged-b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1460.744123] env[69992]: WARNING nova.compute.manager [req-19c735c8-95d1-436e-855c-c1c200dc30b2 req-8397f196-902e-4d4a-8c0e-8ea28ac00092 service nova] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Received unexpected event network-vif-plugged-b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8 for instance with vm_state building and task_state spawning. [ 1460.744288] env[69992]: DEBUG nova.compute.manager [req-19c735c8-95d1-436e-855c-c1c200dc30b2 req-8397f196-902e-4d4a-8c0e-8ea28ac00092 service nova] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Received event network-changed-b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1460.744444] env[69992]: DEBUG nova.compute.manager [req-19c735c8-95d1-436e-855c-c1c200dc30b2 req-8397f196-902e-4d4a-8c0e-8ea28ac00092 service nova] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Refreshing instance network info cache due to event network-changed-b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1460.744656] env[69992]: DEBUG oslo_concurrency.lockutils [req-19c735c8-95d1-436e-855c-c1c200dc30b2 req-8397f196-902e-4d4a-8c0e-8ea28ac00092 service nova] Acquiring lock "refresh_cache-57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1460.754841] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]527dd261-2a89-d620-8ddf-bec0a6d9d48b, 'name': SearchDatastore_Task, 'duration_secs': 0.014731} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.755379] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1460.755636] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] d2b4482f-cc98-4e3d-9996-397f4f0b2ead/d2b4482f-cc98-4e3d-9996-397f4f0b2ead.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1460.755895] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee34e0d5-39f2-4cc5-a80c-32fcd7505f42 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.760193] env[69992]: DEBUG oslo_concurrency.lockutils [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1460.760444] env[69992]: DEBUG oslo_concurrency.lockutils [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1460.761893] env[69992]: INFO nova.compute.claims [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1460.773054] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Waiting for the task: (returnval){ [ 1460.773054] env[69992]: value = "task-2898166" [ 1460.773054] env[69992]: _type = "Task" [ 1460.773054] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.781961] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Task: {'id': task-2898166, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.854023] env[69992]: DEBUG oslo_concurrency.lockutils [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Releasing lock "refresh_cache-57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1460.854456] env[69992]: DEBUG nova.compute.manager [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Instance network_info: |[{"id": "b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8", "address": "fa:16:3e:f4:e6:7b", "network": {"id": "107a28a2-7647-4953-abac-1246b892511f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1382825212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ab89c6cf054418a4dd1a0e61b3a5e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb710419b-6f", "ovs_interfaceid": "b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1460.854828] env[69992]: DEBUG oslo_concurrency.lockutils [req-19c735c8-95d1-436e-855c-c1c200dc30b2 req-8397f196-902e-4d4a-8c0e-8ea28ac00092 service nova] Acquired lock "refresh_cache-57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1460.855070] env[69992]: DEBUG nova.network.neutron [req-19c735c8-95d1-436e-855c-c1c200dc30b2 req-8397f196-902e-4d4a-8c0e-8ea28ac00092 service nova] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Refreshing network info cache for port b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1460.856419] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:e6:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46785c9c-8b22-487d-a854-b3e67c5ed1d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1460.865026] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 
tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1460.866166] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1460.866393] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5a4199e9-c40b-4b97-b2de-ee1ac5dea3c8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.889414] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1460.889414] env[69992]: value = "task-2898167" [ 1460.889414] env[69992]: _type = "Task" [ 1460.889414] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.898302] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Acquiring lock "ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1460.898564] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Lock "ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1460.904272] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898167, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.285430] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Task: {'id': task-2898166, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.351021] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1461.351021] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1461.351021] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69992) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1461.400983] env[69992]: DEBUG nova.compute.manager [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1461.404301] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898167, 'name': CreateVM_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.598265] env[69992]: DEBUG nova.network.neutron [req-19c735c8-95d1-436e-855c-c1c200dc30b2 req-8397f196-902e-4d4a-8c0e-8ea28ac00092 service nova] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Updated VIF entry in instance network info cache for port b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1461.598637] env[69992]: DEBUG nova.network.neutron [req-19c735c8-95d1-436e-855c-c1c200dc30b2 req-8397f196-902e-4d4a-8c0e-8ea28ac00092 service nova] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Updating instance_info_cache with network_info: [{"id": "b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8", "address": "fa:16:3e:f4:e6:7b", "network": {"id": "107a28a2-7647-4953-abac-1246b892511f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1382825212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ab89c6cf054418a4dd1a0e61b3a5e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb710419b-6f", "ovs_interfaceid": "b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.784316] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Task: {'id': task-2898166, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544756} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.784579] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] d2b4482f-cc98-4e3d-9996-397f4f0b2ead/d2b4482f-cc98-4e3d-9996-397f4f0b2ead.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1461.784802] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1461.785101] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-acec3bdd-4e1a-4f02-a064-23192a4f1d8a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.794548] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Waiting for the task: (returnval){ [ 1461.794548] env[69992]: value = "task-2898168" [ 1461.794548] env[69992]: _type = "Task" [ 1461.794548] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.806437] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Task: {'id': task-2898168, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.879687] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f510c9c1-3080-4475-b7e5-f091fcd913ec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.888099] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84aa612a-6546-4c67-8116-81e09096c3ac {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.902282] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898167, 'name': CreateVM_Task, 'duration_secs': 0.725431} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.926768] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1461.929139] env[69992]: DEBUG oslo_concurrency.lockutils [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.929305] env[69992]: DEBUG oslo_concurrency.lockutils [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1461.929622] env[69992]: DEBUG oslo_concurrency.lockutils [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1461.930454] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb99b7c-57d8-4dda-931a-06f31e3a94c3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.935305] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b12f38bb-825b-497b-925c-333adcf07ed4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.941260] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1461.941260] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5206e084-5721-c730-66a3-dcc34f363ff3" [ 1461.941260] env[69992]: _type = "Task" [ 1461.941260] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.947157] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20dd693-55e9-4f4e-8a48-296649fabc6f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.951885] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1461.957979] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5206e084-5721-c730-66a3-dcc34f363ff3, 'name': SearchDatastore_Task, 'duration_secs': 0.012497} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.965555] env[69992]: DEBUG oslo_concurrency.lockutils [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1461.965808] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1461.966057] env[69992]: DEBUG oslo_concurrency.lockutils [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.966211] env[69992]: DEBUG oslo_concurrency.lockutils [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1461.966384] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1461.966881] env[69992]: DEBUG nova.compute.provider_tree [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Inventory has not changed in ProviderTree for 
provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1461.968251] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-99a2ddcb-6733-4ac6-99b8-4335ff7a3e53 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.978216] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1461.978413] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1461.979184] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aedef810-15ba-48c0-ad34-8eae2277a88a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.985459] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1461.985459] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52caa524-0cca-3cab-77d7-acb89b82dad0" [ 1461.985459] env[69992]: _type = "Task" [ 1461.985459] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.000582] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52caa524-0cca-3cab-77d7-acb89b82dad0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.101095] env[69992]: DEBUG oslo_concurrency.lockutils [req-19c735c8-95d1-436e-855c-c1c200dc30b2 req-8397f196-902e-4d4a-8c0e-8ea28ac00092 service nova] Releasing lock "refresh_cache-57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1462.305538] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Task: {'id': task-2898168, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084277} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.305958] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1462.306693] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79fa5f38-7e5f-49ae-806c-302775ac8074 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.329332] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] d2b4482f-cc98-4e3d-9996-397f4f0b2ead/d2b4482f-cc98-4e3d-9996-397f4f0b2ead.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1462.329684] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-874ff986-63ab-4956-b84f-416f0d614c22 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.354272] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Waiting for the task: (returnval){ [ 1462.354272] env[69992]: value = "task-2898169" [ 1462.354272] env[69992]: _type = "Task" [ 1462.354272] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.363669] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Task: {'id': task-2898169, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.472026] env[69992]: DEBUG nova.scheduler.client.report [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1462.497513] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52caa524-0cca-3cab-77d7-acb89b82dad0, 'name': SearchDatastore_Task, 'duration_secs': 0.026313} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.498342] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c593f46f-2d47-4127-8dc5-e903ff1d6870 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.504906] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1462.504906] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5211dda8-ff5f-489d-8699-6a19f8d186c9" [ 1462.504906] env[69992]: _type = "Task" [ 1462.504906] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.514023] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5211dda8-ff5f-489d-8699-6a19f8d186c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.668619] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fcc2b6-1a3a-e680-35f1-54cfd60ace72/disk-0.vmdk. 
{{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1462.669542] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e29bbb1-84e8-413e-a3e2-57f3a4126cff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.675831] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fcc2b6-1a3a-e680-35f1-54cfd60ace72/disk-0.vmdk is in state: ready. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1462.676013] env[69992]: ERROR oslo_vmware.rw_handles [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fcc2b6-1a3a-e680-35f1-54cfd60ace72/disk-0.vmdk due to incomplete transfer. [ 1462.676250] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-026d19d6-8672-46db-b4d1-be5ceab73ba7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.684074] env[69992]: DEBUG oslo_vmware.rw_handles [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fcc2b6-1a3a-e680-35f1-54cfd60ace72/disk-0.vmdk. {{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1462.684252] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Uploaded image 3837388a-a31b-4d6b-97e6-ea9f24ecc066 to the Glance image server {{(pid=69992) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1462.686611] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Destroying the VM {{(pid=69992) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1462.686874] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c3a6b9b4-63ed-4988-ba89-7457efe17dcb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.695622] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1462.695622] env[69992]: value = "task-2898170" [ 1462.695622] env[69992]: _type = "Task" [ 1462.695622] env[69992]: } to complete. 
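The ERROR about aborting the NFC lease is followed in the same trace by a successful Glance upload: the read handle is being closed, the lease is still in state "ready", and because the handle did not record a complete transfer it is aborted rather than completed. A sketch of that release decision with a hypothetical Lease object is below; the real logic lives in oslo_vmware.rw_handles.

    import logging

    LOG = logging.getLogger(__name__)

    def release_lease(lease, transfer_complete):
        # Mirrors the log above: query the lease state first, then either
        # complete it or abort it when the transfer did not finish cleanly.
        state = lease.state()                 # e.g. 'ready' or 'error'
        LOG.debug('Lease for %s is in state: %s.', lease.url, state)
        if state != 'ready':
            LOG.debug('Lease already gone, nothing to release.')
            return
        if transfer_complete:
            lease.complete()                  # HttpNfcLeaseComplete
        else:
            LOG.error('Aborting lease for %s due to incomplete transfer.',
                      lease.url)
            lease.abort()                     # HttpNfcLeaseAbort

    class FakeLease:
        url = 'https://esx.example.test/nfc/disk-0.vmdk'
        def state(self): return 'ready'
        def complete(self): print('completed')
        def abort(self): print('aborted')

    logging.basicConfig(level=logging.DEBUG)
    release_lease(FakeLease(), transfer_complete=False)
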
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.701929] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898170, 'name': Destroy_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.864902] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Task: {'id': task-2898169, 'name': ReconfigVM_Task, 'duration_secs': 0.343355} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.865273] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Reconfigured VM instance instance-00000075 to attach disk [datastore2] d2b4482f-cc98-4e3d-9996-397f4f0b2ead/d2b4482f-cc98-4e3d-9996-397f4f0b2ead.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1462.865932] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f88d6d8f-0ad1-4ac2-8ddf-9e19026588ae {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.872972] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Waiting for the task: (returnval){ [ 1462.872972] env[69992]: value = "task-2898171" [ 1462.872972] env[69992]: _type = "Task" [ 1462.872972] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.880606] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Task: {'id': task-2898171, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.977560] env[69992]: DEBUG oslo_concurrency.lockutils [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.217s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1462.978222] env[69992]: DEBUG nova.compute.manager [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1462.981416] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.030s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1462.982902] env[69992]: INFO nova.compute.claims [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1463.018160] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5211dda8-ff5f-489d-8699-6a19f8d186c9, 'name': SearchDatastore_Task, 'duration_secs': 0.012132} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.018415] env[69992]: DEBUG oslo_concurrency.lockutils [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1463.018670] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc/57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1463.018931] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07611e47-1c23-4d43-a7d9-6cc8fc7b7e6b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.027274] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1463.027274] env[69992]: value = "task-2898172" [ 1463.027274] env[69992]: _type = "Task" [ 1463.027274] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.035951] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898172, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.206620] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898170, 'name': Destroy_Task, 'duration_secs': 0.381911} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.206922] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Destroyed the VM [ 1463.207144] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Deleting Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1463.207496] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d2d2b6d4-4a19-4c73-b39e-1c54bdd03b9f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.215851] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1463.215851] env[69992]: value = "task-2898173" [ 1463.215851] env[69992]: _type = "Task" [ 1463.215851] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.225206] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898173, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.388363] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Task: {'id': task-2898171, 'name': Rename_Task, 'duration_secs': 0.17593} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.388773] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1463.389266] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f408c4e-3ad3-4291-bc7a-376553408ee0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.401280] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Waiting for the task: (returnval){ [ 1463.401280] env[69992]: value = "task-2898174" [ 1463.401280] env[69992]: _type = "Task" [ 1463.401280] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.412268] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Task: {'id': task-2898174, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.487564] env[69992]: DEBUG nova.compute.utils [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1463.490797] env[69992]: DEBUG nova.compute.manager [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Allocating IP information in the background. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1463.490997] env[69992]: DEBUG nova.network.neutron [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1463.533369] env[69992]: DEBUG nova.policy [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '99bd7545f7d04aa28e625ce6c5491bb6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '122bc9ffa8f54a34af6047517fab0a9a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1463.540857] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898172, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.443848} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.541160] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc/57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1463.541377] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1463.541630] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0c4aea7e-a33f-4a87-88d6-0da64e5d0cd2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.550769] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1463.550769] env[69992]: value = "task-2898175" [ 1463.550769] env[69992]: _type = "Task" [ 1463.550769] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.558657] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898175, 'name': ExtendVirtualDisk_Task} progress is 0%. 
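The nova.policy line above shows an authorization check for network:attach_external_network failing for a token that only carries the member and reader roles; port creation then proceeds without the external-network option. A toy role-table version of such a check is sketched below; the rule table and helper are hypothetical, and real enforcement goes through oslo.policy.

    # Credentials shape trimmed from the log entry above.
    creds = {'roles': ['member', 'reader'], 'is_admin': False,
             'project_id': '122bc9ffa8f54a34af6047517fab0a9a'}

    # Hypothetical rule table: action -> roles allowed to perform it.
    RULES = {
        'network:attach_external_network': {'admin'},
        'network:create_port':             {'admin', 'member'},
    }

    def authorize(action, creds):
        allowed = RULES.get(action, set())
        ok = creds['is_admin'] or bool(allowed & set(creds['roles']))
        if not ok:
            print('Policy check for %s failed with credentials %s'
                  % (action, creds))
        return ok

    authorize('network:attach_external_network', creds)   # False, as logged
    authorize('network:create_port', creds)                # True
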
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.727927] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898173, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.847805] env[69992]: DEBUG nova.network.neutron [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Successfully created port: 0ec50d92-4ea0-44af-b9fd-14443de36a12 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1463.913221] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Task: {'id': task-2898174, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.992771] env[69992]: DEBUG nova.compute.manager [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1464.063031] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898175, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07172} completed successfully. 
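The CopyVirtualDisk/ExtendVirtualDisk pair copies the cached image VMDK into the instance directory and then grows it to the flavor's root disk size; the target of 1048576 seen in the "Extending root virtual disk to 1048576" entries is 1 GiB expressed in KiB, matching root_gb=1 on the m1.nano flavor used in these tests. A small sketch of that size calculation and grow-only guard follows (illustrative numbers, not the driver's code).

    GIB_IN_KIB = 1024 * 1024   # 1 GiB = 1048576 KiB, the value seen in the log

    def target_root_size_kib(root_gb):
        return root_gb * GIB_IN_KIB

    def maybe_extend(current_kib, root_gb):
        # Only grow the copied image; never shrink an existing disk.
        wanted = target_root_size_kib(root_gb)
        if current_kib >= wanted:
            return current_kib, False
        return wanted, True

    # e.g. a small sparse image (hypothetical 40960 KiB) on a root_gb=1 flavor:
    print(target_root_size_kib(1))   # 1048576
    print(maybe_extend(40960, 1))    # (1048576, True) -> ExtendVirtualDisk_Task
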
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.063800] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1464.064645] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea0ae44-61bd-442a-a8af-3e0a4797c93e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.090123] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc/57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1464.092717] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e43462ac-81d5-49e1-8643-bd3bed3e5338 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.114079] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1464.114079] env[69992]: value = "task-2898176" [ 1464.114079] env[69992]: _type = "Task" [ 1464.114079] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.123211] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898176, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.130649] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f6aebc-279b-4bd5-8642-e834a727c410 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.139592] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f5fab2-65d8-4b53-9ad6-c64933940cd8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.170075] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d797165-ce65-42ff-b25a-887fe11d28b6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.178280] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0621ff9-efea-4a07-bb10-0037ede0fb32 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.194224] env[69992]: DEBUG nova.compute.provider_tree [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1464.226896] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898173, 'name': RemoveSnapshot_Task, 'duration_secs': 0.549379} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.227282] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Deleted Snapshot of the VM instance {{(pid=69992) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1464.227563] env[69992]: DEBUG nova.compute.manager [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1464.228357] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1629a4e5-320e-44ce-a30c-6a444bae260c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.413031] env[69992]: DEBUG oslo_vmware.api [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Task: {'id': task-2898174, 'name': PowerOnVM_Task, 'duration_secs': 0.531821} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.413311] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1464.413517] env[69992]: INFO nova.compute.manager [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Took 8.23 seconds to spawn the instance on the hypervisor. [ 1464.413693] env[69992]: DEBUG nova.compute.manager [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1464.414521] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9305cf29-b340-4c96-9b50-f8ee2218d6a5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.624814] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898176, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.697589] env[69992]: DEBUG nova.scheduler.client.report [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1464.740341] env[69992]: INFO nova.compute.manager [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Shelve offloading [ 1464.932880] env[69992]: INFO nova.compute.manager [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Took 14.13 seconds to build instance. [ 1465.005880] env[69992]: DEBUG nova.compute.manager [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1465.032253] env[69992]: DEBUG nova.virt.hardware [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1465.032549] env[69992]: DEBUG nova.virt.hardware [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1465.032671] env[69992]: DEBUG nova.virt.hardware [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1465.032857] env[69992]: DEBUG nova.virt.hardware [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1465.033015] env[69992]: DEBUG nova.virt.hardware [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1465.033173] env[69992]: DEBUG nova.virt.hardware [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1465.033385] env[69992]: DEBUG nova.virt.hardware [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1465.033547] env[69992]: DEBUG nova.virt.hardware [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1465.033787] env[69992]: DEBUG nova.virt.hardware [None 
req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1465.034029] env[69992]: DEBUG nova.virt.hardware [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1465.034219] env[69992]: DEBUG nova.virt.hardware [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1465.035124] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f7d588c-1054-40e7-ae46-b9b7ba66b704 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.044095] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a488214-db93-4381-9459-055a256ebccf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.125800] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898176, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.202377] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.221s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1465.202837] env[69992]: DEBUG nova.compute.manager [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Start building networks asynchronously for instance. 
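The nova.virt.hardware block above walks topology selection for the 1-vCPU m1.nano flavor: with no flavor or image constraints the limits default to 65536 sockets/cores/threads, exactly one factorisation exists, and VirtCPUTopology(cores=1,sockets=1,threads=1) is chosen. The enumeration can be sketched as factoring the vCPU count into sockets*cores*threads under those limits; this is a simplified stand-in, not nova's implementation.

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Every (sockets, cores, threads) factorisation of the vCPU count
        # that stays within the per-dimension limits.
        topos = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            rest = vcpus // sockets
            for cores in range(1, min(rest, max_cores) + 1):
                if rest % cores:
                    continue
                threads = rest // cores
                if threads <= max_threads:
                    topos.append(VirtCPUTopology(sockets, cores, threads))
        return topos

    print(possible_topologies(1))        # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
    print(len(possible_topologies(4)))   # several factorisations for 4 vCPUs
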
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1465.243356] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1465.243878] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98b9c1aa-9bfa-45df-88ce-339585008008 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.252604] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1465.252604] env[69992]: value = "task-2898177" [ 1465.252604] env[69992]: _type = "Task" [ 1465.252604] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.261082] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898177, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.372912] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Acquiring lock "d2b4482f-cc98-4e3d-9996-397f4f0b2ead" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1465.435951] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c5955a59-6ee3-4404-af19-d6031cfd5c0a tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Lock "d2b4482f-cc98-4e3d-9996-397f4f0b2ead" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.639s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1465.435951] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Lock "d2b4482f-cc98-4e3d-9996-397f4f0b2ead" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.063s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1465.435951] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Acquiring lock "d2b4482f-cc98-4e3d-9996-397f4f0b2ead-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1465.436388] env[69992]: DEBUG oslo_concurrency.lockutils [None 
req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Lock "d2b4482f-cc98-4e3d-9996-397f4f0b2ead-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1465.436388] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Lock "d2b4482f-cc98-4e3d-9996-397f4f0b2ead-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1465.438630] env[69992]: INFO nova.compute.manager [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Terminating instance [ 1465.555653] env[69992]: DEBUG nova.compute.manager [req-0ef0deb2-2ae6-45b4-8469-335bad6cc4f6 req-5952cdfa-8d55-4baa-b078-7e2b69449775 service nova] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Received event network-vif-plugged-0ec50d92-4ea0-44af-b9fd-14443de36a12 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1465.555802] env[69992]: DEBUG oslo_concurrency.lockutils [req-0ef0deb2-2ae6-45b4-8469-335bad6cc4f6 req-5952cdfa-8d55-4baa-b078-7e2b69449775 service nova] Acquiring lock "90facf1a-ae81-4259-bf75-94779267699c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1465.555918] env[69992]: DEBUG oslo_concurrency.lockutils [req-0ef0deb2-2ae6-45b4-8469-335bad6cc4f6 req-5952cdfa-8d55-4baa-b078-7e2b69449775 service nova] Lock "90facf1a-ae81-4259-bf75-94779267699c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1465.556068] env[69992]: DEBUG oslo_concurrency.lockutils [req-0ef0deb2-2ae6-45b4-8469-335bad6cc4f6 req-5952cdfa-8d55-4baa-b078-7e2b69449775 service nova] Lock "90facf1a-ae81-4259-bf75-94779267699c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1465.556253] env[69992]: DEBUG nova.compute.manager [req-0ef0deb2-2ae6-45b4-8469-335bad6cc4f6 req-5952cdfa-8d55-4baa-b078-7e2b69449775 service nova] [instance: 90facf1a-ae81-4259-bf75-94779267699c] No waiting events found dispatching network-vif-plugged-0ec50d92-4ea0-44af-b9fd-14443de36a12 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1465.556417] env[69992]: WARNING nova.compute.manager [req-0ef0deb2-2ae6-45b4-8469-335bad6cc4f6 req-5952cdfa-8d55-4baa-b078-7e2b69449775 service nova] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Received unexpected event network-vif-plugged-0ec50d92-4ea0-44af-b9fd-14443de36a12 for instance with vm_state building and task_state spawning. 
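The req-0ef0deb2 entries show Neutron's network-vif-plugged notification arriving before the spawning thread has registered a waiter for it, which is why it is dispatched with "No waiting events found" and logged as an unexpected event for an instance still in vm_state building. The underlying mechanism is a per-instance event table that waiters populate before blocking; below is a simplified threading-based sketch with a hypothetical class, not nova's actual InstanceEvents.

    import threading

    class InstanceEvents:
        """Very small stand-in for a per-instance external-event table."""

        def __init__(self):
            self._lock = threading.Lock()
            self._events = {}   # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, name):
            # The spawning thread registers interest *before* plugging the VIF.
            ev = threading.Event()
            with self._lock:
                self._events[(instance_uuid, name)] = ev
            return ev

        def pop(self, instance_uuid, name):
            # Called by the external-event handler when the notification lands.
            with self._lock:
                return self._events.pop((instance_uuid, name), None)

    events = InstanceEvents()
    uuid = '90facf1a-ae81-4259-bf75-94779267699c'

    # Event arrives before anyone is waiting -> the "unexpected event" case.
    if events.pop(uuid, 'network-vif-plugged') is None:
        print('No waiting events found, dispatching anyway')

    # Normal case: waiter registered first, the notification releases it.
    waiter = events.prepare(uuid, 'network-vif-plugged')
    events.pop(uuid, 'network-vif-plugged').set()
    print(waiter.wait(timeout=1))   # True
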
[ 1465.625230] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898176, 'name': ReconfigVM_Task, 'duration_secs': 1.03005} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.625503] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Reconfigured VM instance instance-00000076 to attach disk [datastore2] 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc/57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1465.626567] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1947bb84-1729-41c9-914f-bbbe68b931e9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.632921] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1465.632921] env[69992]: value = "task-2898178" [ 1465.632921] env[69992]: _type = "Task" [ 1465.632921] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.641218] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898178, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.655040] env[69992]: DEBUG nova.network.neutron [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Successfully updated port: 0ec50d92-4ea0-44af-b9fd-14443de36a12 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1465.708170] env[69992]: DEBUG nova.compute.utils [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1465.710055] env[69992]: DEBUG nova.compute.manager [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Not allocating networking since 'none' was specified. 
{{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1465.765622] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] VM already powered off {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1465.765851] env[69992]: DEBUG nova.compute.manager [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1465.766729] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c739268-4459-4bcf-8beb-02c71f38adff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.772803] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "refresh_cache-b7af455d-a3a7-480f-b778-9eb3724fa6f1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1465.772990] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired lock "refresh_cache-b7af455d-a3a7-480f-b778-9eb3724fa6f1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1465.773340] env[69992]: DEBUG nova.network.neutron [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1465.942223] env[69992]: DEBUG nova.compute.manager [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Start destroying the instance on the hypervisor. 
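During the shelve offload above the power-off turns out to be a no-op: the guest is already powered off, so the driver just logs "VM already powered off" and carries on rather than treating it as an error. A sketch of that idempotent check is below; the power_state attribute stands in for reading runtime.powerState from vSphere.

    def power_off_instance(vm):
        # vm.power_state is a stand-in for the vSphere runtime.powerState read.
        if vm.power_state == 'poweredOff':
            print('VM already powered off')
            return False
        vm.power_off()   # would spawn PowerOffVM_Task and wait for it
        return True

    class FakeVM:
        def __init__(self, state): self.power_state = state
        def power_off(self): self.power_state = 'poweredOff'

    print(power_off_instance(FakeVM('poweredOff')))   # False, just logs
    print(power_off_instance(FakeVM('poweredOn')))    # True, actually powers off
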
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1465.942463] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1465.943497] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2860d6a-f75a-4b8d-b798-4b2d9194eae6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.956622] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1465.956814] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f586bc1e-516b-46cb-a832-0c019d77a988 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.963976] env[69992]: DEBUG oslo_vmware.api [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Waiting for the task: (returnval){ [ 1465.963976] env[69992]: value = "task-2898179" [ 1465.963976] env[69992]: _type = "Task" [ 1465.963976] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.972149] env[69992]: DEBUG oslo_vmware.api [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Task: {'id': task-2898179, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.143922] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898178, 'name': Rename_Task, 'duration_secs': 0.155928} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.144270] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1466.144572] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f7cd86e-b8ee-4419-962b-7db6dbc867f6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.153179] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1466.153179] env[69992]: value = "task-2898180" [ 1466.153179] env[69992]: _type = "Task" [ 1466.153179] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.157041] env[69992]: DEBUG oslo_concurrency.lockutils [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1466.157185] env[69992]: DEBUG oslo_concurrency.lockutils [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1466.157333] env[69992]: DEBUG nova.network.neutron [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1466.163177] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898180, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.211369] env[69992]: DEBUG nova.compute.manager [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1466.474034] env[69992]: DEBUG oslo_vmware.api [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Task: {'id': task-2898179, 'name': PowerOffVM_Task, 'duration_secs': 0.191612} completed successfully. 
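The terminate path for d2b4482f-cc98-4e3d-9996-397f4f0b2ead runs as three ordered steps in the log: PowerOffVM_Task, UnregisterVM, then DeleteDatastoreFile_Task on the instance's [datastore2] directory. The compressed sketch below mirrors that ordering with hypothetical objects; putting the datastore cleanup in a finally block is a choice of this sketch, not necessarily the driver's structure.

    def destroy_instance(vm, datastore, instance_dir):
        # Order mirrors the log: power off, unregister, then wipe the files.
        if vm.power_state != 'poweredOff':
            vm.power_off()                    # PowerOffVM_Task
        try:
            vm.unregister()                   # UnregisterVM
        finally:
            # Still attempt the cleanup if unregistering raised, so a failed
            # destroy does not leak datastore space (sketch-level choice).
            datastore.delete(instance_dir)    # DeleteDatastoreFile_Task

    class FakeVM:
        power_state = 'poweredOn'
        def power_off(self): self.power_state = 'poweredOff'; print('powered off')
        def unregister(self): print('unregistered')

    class FakeDatastore:
        def delete(self, path): print('deleted %s' % path)

    destroy_instance(FakeVM(), FakeDatastore(),
                     '[datastore2] d2b4482f-cc98-4e3d-9996-397f4f0b2ead')
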
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.474509] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1466.474695] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1466.474961] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-213f82ce-a4a8-47f3-81b9-a5d50848d000 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.480177] env[69992]: DEBUG nova.network.neutron [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Updating instance_info_cache with network_info: [{"id": "4d2794aa-7eaf-404a-bf09-16cf3c357511", "address": "fa:16:3e:61:a9:59", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d2794aa-7e", "ovs_interfaceid": "4d2794aa-7eaf-404a-bf09-16cf3c357511", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1466.549796] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1466.550197] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1466.550657] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Deleting the datastore file [datastore2] d2b4482f-cc98-4e3d-9996-397f4f0b2ead {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1466.550863] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef2699ff-a0d7-4c1a-9f7f-b61af0268b47 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.559971] env[69992]: DEBUG oslo_vmware.api [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Waiting for the task: (returnval){ [ 1466.559971] env[69992]: value = "task-2898182" [ 1466.559971] env[69992]: _type = "Task" [ 1466.559971] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.568797] env[69992]: DEBUG oslo_vmware.api [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Task: {'id': task-2898182, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.665960] env[69992]: DEBUG oslo_vmware.api [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898180, 'name': PowerOnVM_Task, 'duration_secs': 0.489398} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.666328] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1466.666555] env[69992]: INFO nova.compute.manager [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Took 8.26 seconds to spawn the instance on the hypervisor. [ 1466.666829] env[69992]: DEBUG nova.compute.manager [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1466.668040] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a3fe7c-1b7c-42d0-af2e-ca0fdb8e921e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.693715] env[69992]: DEBUG nova.network.neutron [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1466.848326] env[69992]: DEBUG nova.network.neutron [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Updating instance_info_cache with network_info: [{"id": "0ec50d92-4ea0-44af-b9fd-14443de36a12", "address": "fa:16:3e:1a:4d:23", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ec50d92-4e", "ovs_interfaceid": "0ec50d92-4ea0-44af-b9fd-14443de36a12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1466.982798] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Releasing lock "refresh_cache-b7af455d-a3a7-480f-b778-9eb3724fa6f1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1467.069973] env[69992]: DEBUG oslo_vmware.api [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Task: {'id': task-2898182, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167097} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.070274] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1467.070495] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1467.070678] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1467.070850] env[69992]: INFO nova.compute.manager [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1467.071097] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1467.071294] env[69992]: DEBUG nova.compute.manager [-] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1467.071386] env[69992]: DEBUG nova.network.neutron [-] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1467.189353] env[69992]: INFO nova.compute.manager [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Took 13.86 seconds to build instance. [ 1467.220648] env[69992]: DEBUG nova.compute.manager [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1467.254056] env[69992]: DEBUG nova.virt.hardware [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1467.254201] env[69992]: DEBUG nova.virt.hardware [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1467.254293] env[69992]: DEBUG nova.virt.hardware [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1467.254468] env[69992]: DEBUG nova.virt.hardware [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1467.254609] env[69992]: DEBUG nova.virt.hardware [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1467.254754] env[69992]: DEBUG nova.virt.hardware [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1467.254966] env[69992]: DEBUG nova.virt.hardware [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1467.256363] env[69992]: DEBUG nova.virt.hardware [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1467.256363] env[69992]: DEBUG nova.virt.hardware [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 
tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1467.256363] env[69992]: DEBUG nova.virt.hardware [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1467.256577] env[69992]: DEBUG nova.virt.hardware [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1467.257380] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6040b694-467a-4c2f-ac8c-3006618071bb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.266637] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c09410-f4c4-4849-9f43-b4f7c2956c8d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.285300] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Instance VIF info [] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1467.290696] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Creating folder: Project (3f8feae86ed444028e5edbff00aa3857). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1467.291032] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-072c2c11-ce61-4d2c-98a7-c9d505482b4d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.303379] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Created folder: Project (3f8feae86ed444028e5edbff00aa3857) in parent group-v581821. [ 1467.303612] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Creating folder: Instances. Parent ref: group-v582142. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1467.303858] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e89b19c-b66b-4ee3-b7d9-2012aa0dd120 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.315409] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Created folder: Instances in parent group-v582142. 
[ 1467.315597] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1467.315795] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1467.316009] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b3e3014-ebe1-4f6d-a586-5f60b839b669 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.336473] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1467.336473] env[69992]: value = "task-2898185" [ 1467.336473] env[69992]: _type = "Task" [ 1467.336473] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.344795] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898185, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.347432] env[69992]: DEBUG nova.compute.manager [req-be6d4fea-3ca8-406d-b1fd-7c826dad2af3 req-34d1a043-d2eb-47bf-b418-33b9d93f0ea4 service nova] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Received event network-vif-deleted-1ffe5905-a465-45ef-9e79-c955f95cc370 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1467.347626] env[69992]: INFO nova.compute.manager [req-be6d4fea-3ca8-406d-b1fd-7c826dad2af3 req-34d1a043-d2eb-47bf-b418-33b9d93f0ea4 service nova] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Neutron deleted interface 1ffe5905-a465-45ef-9e79-c955f95cc370; detaching it from the instance and deleting it from the info cache [ 1467.347823] env[69992]: DEBUG nova.network.neutron [req-be6d4fea-3ca8-406d-b1fd-7c826dad2af3 req-34d1a043-d2eb-47bf-b418-33b9d93f0ea4 service nova] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1467.353022] env[69992]: DEBUG oslo_concurrency.lockutils [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1467.353022] env[69992]: DEBUG nova.compute.manager [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Instance network_info: |[{"id": "0ec50d92-4ea0-44af-b9fd-14443de36a12", "address": "fa:16:3e:1a:4d:23", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ec50d92-4e", "ovs_interfaceid": "0ec50d92-4ea0-44af-b9fd-14443de36a12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1467.353022] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:4d:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e1049e8-c06b-4c93-a9e1-2cbb530f3f95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0ec50d92-4ea0-44af-b9fd-14443de36a12', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1467.359125] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Creating folder: Project (122bc9ffa8f54a34af6047517fab0a9a). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1467.359886] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-717435e9-d6b0-4919-8811-bd54951e5908 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.376559] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Created folder: Project (122bc9ffa8f54a34af6047517fab0a9a) in parent group-v581821. [ 1467.376901] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Creating folder: Instances. Parent ref: group-v582145. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1467.377032] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b75969d8-5f63-4c61-8d8f-e85e12a4a689 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.386290] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Created folder: Instances in parent group-v582145. [ 1467.386528] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1467.386721] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1467.387089] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a8af6b7-ed0f-44b7-b007-1aee7ff58b3c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.405028] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1467.405956] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b3cdfd-ed68-42fd-bb12-fbe0bd5700af {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.409735] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1467.409735] env[69992]: value = "task-2898188" [ 1467.409735] env[69992]: _type = "Task" [ 1467.409735] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.415709] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1467.416414] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d93b262-5232-46c5-835d-66deab81b854 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.421403] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898188, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.500260] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1467.501110] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1467.501110] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Deleting the datastore file [datastore1] b7af455d-a3a7-480f-b778-9eb3724fa6f1 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1467.502023] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f196d84b-3801-4c59-be74-7d2600292cb3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.509901] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1467.509901] env[69992]: value = "task-2898190" [ 1467.509901] env[69992]: _type = "Task" [ 1467.509901] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.518931] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898190, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.588339] env[69992]: DEBUG nova.compute.manager [req-ac5afe38-2909-42dc-b91c-152be17698da req-26b02981-ba91-45ed-9f9b-337652d034d0 service nova] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Received event network-changed-0ec50d92-4ea0-44af-b9fd-14443de36a12 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1467.588753] env[69992]: DEBUG nova.compute.manager [req-ac5afe38-2909-42dc-b91c-152be17698da req-26b02981-ba91-45ed-9f9b-337652d034d0 service nova] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Refreshing instance network info cache due to event network-changed-0ec50d92-4ea0-44af-b9fd-14443de36a12. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1467.589144] env[69992]: DEBUG oslo_concurrency.lockutils [req-ac5afe38-2909-42dc-b91c-152be17698da req-26b02981-ba91-45ed-9f9b-337652d034d0 service nova] Acquiring lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1467.589484] env[69992]: DEBUG oslo_concurrency.lockutils [req-ac5afe38-2909-42dc-b91c-152be17698da req-26b02981-ba91-45ed-9f9b-337652d034d0 service nova] Acquired lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1467.589819] env[69992]: DEBUG nova.network.neutron [req-ac5afe38-2909-42dc-b91c-152be17698da req-26b02981-ba91-45ed-9f9b-337652d034d0 service nova] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Refreshing network info cache for port 0ec50d92-4ea0-44af-b9fd-14443de36a12 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1467.692226] env[69992]: DEBUG oslo_concurrency.lockutils [None req-417c7ada-45c9-41ff-969c-4c724a5abf6c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.368s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1467.832066] env[69992]: DEBUG nova.network.neutron [-] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1467.848141] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898185, 'name': CreateVM_Task, 'duration_secs': 0.314967} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.848313] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1467.848741] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1467.848900] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1467.849236] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1467.849544] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-143291e4-8a07-4264-9262-b90aeda105e0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.851672] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bad14c93-8e85-4c6e-b600-9c361e752d04 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.858116] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Waiting for the task: (returnval){ [ 1467.858116] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a21c2b-0ad2-571f-dab8-229ab9232c9c" [ 1467.858116] env[69992]: _type = "Task" [ 1467.858116] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.864762] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f75d74a7-f376-48e8-aae5-e918c3cd1a7e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.881135] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a21c2b-0ad2-571f-dab8-229ab9232c9c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.896382] env[69992]: DEBUG nova.compute.manager [req-be6d4fea-3ca8-406d-b1fd-7c826dad2af3 req-34d1a043-d2eb-47bf-b418-33b9d93f0ea4 service nova] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Detach interface failed, port_id=1ffe5905-a465-45ef-9e79-c955f95cc370, reason: Instance d2b4482f-cc98-4e3d-9996-397f4f0b2ead could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1467.919692] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898188, 'name': CreateVM_Task, 'duration_secs': 0.38948} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.919973] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1467.920563] env[69992]: DEBUG oslo_concurrency.lockutils [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1468.020488] env[69992]: DEBUG oslo_vmware.api [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898190, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172197} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.020756] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1468.020943] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1468.021137] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1468.041210] env[69992]: INFO nova.scheduler.client.report [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Deleted allocations for instance b7af455d-a3a7-480f-b778-9eb3724fa6f1 [ 1468.285199] env[69992]: DEBUG nova.network.neutron [req-ac5afe38-2909-42dc-b91c-152be17698da req-26b02981-ba91-45ed-9f9b-337652d034d0 service nova] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Updated VIF entry in instance network info cache for port 0ec50d92-4ea0-44af-b9fd-14443de36a12. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1468.285651] env[69992]: DEBUG nova.network.neutron [req-ac5afe38-2909-42dc-b91c-152be17698da req-26b02981-ba91-45ed-9f9b-337652d034d0 service nova] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Updating instance_info_cache with network_info: [{"id": "0ec50d92-4ea0-44af-b9fd-14443de36a12", "address": "fa:16:3e:1a:4d:23", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ec50d92-4e", "ovs_interfaceid": "0ec50d92-4ea0-44af-b9fd-14443de36a12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1468.334025] env[69992]: INFO nova.compute.manager [-] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Took 1.26 seconds to deallocate network for instance. [ 1468.372382] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a21c2b-0ad2-571f-dab8-229ab9232c9c, 'name': SearchDatastore_Task, 'duration_secs': 0.026717} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.374623] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1468.374623] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1468.374623] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1468.374623] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1468.375675] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1468.375675] env[69992]: DEBUG oslo_concurrency.lockutils [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1468.375675] env[69992]: DEBUG oslo_concurrency.lockutils [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1468.375675] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-077618fe-ea81-4c82-9d65-55b50aa29a7e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.380555] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51020e2b-8917-43f5-be64-91c5d72620d5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.388584] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 
tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1468.388584] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]523f4d81-60fd-2458-13fe-5587b3c73b23" [ 1468.388584] env[69992]: _type = "Task" [ 1468.388584] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.393729] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1468.393819] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1468.395691] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6329f797-cb7a-4869-a181-d357287ca92a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.401574] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523f4d81-60fd-2458-13fe-5587b3c73b23, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.405273] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Waiting for the task: (returnval){ [ 1468.405273] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5280cdd8-525c-e358-5d98-9d33fa0e3ac0" [ 1468.405273] env[69992]: _type = "Task" [ 1468.405273] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.413408] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5280cdd8-525c-e358-5d98-9d33fa0e3ac0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.545270] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1468.545585] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1468.545798] env[69992]: DEBUG nova.objects.instance [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lazy-loading 'resources' on Instance uuid b7af455d-a3a7-480f-b778-9eb3724fa6f1 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1468.791309] env[69992]: DEBUG oslo_concurrency.lockutils [req-ac5afe38-2909-42dc-b91c-152be17698da req-26b02981-ba91-45ed-9f9b-337652d034d0 service nova] Releasing lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1468.791660] env[69992]: DEBUG nova.compute.manager [req-ac5afe38-2909-42dc-b91c-152be17698da req-26b02981-ba91-45ed-9f9b-337652d034d0 service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Received event network-vif-unplugged-4d2794aa-7eaf-404a-bf09-16cf3c357511 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1468.791792] env[69992]: DEBUG oslo_concurrency.lockutils [req-ac5afe38-2909-42dc-b91c-152be17698da req-26b02981-ba91-45ed-9f9b-337652d034d0 service nova] Acquiring lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1468.792273] env[69992]: DEBUG oslo_concurrency.lockutils [req-ac5afe38-2909-42dc-b91c-152be17698da req-26b02981-ba91-45ed-9f9b-337652d034d0 service nova] Lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1468.792273] env[69992]: DEBUG oslo_concurrency.lockutils [req-ac5afe38-2909-42dc-b91c-152be17698da req-26b02981-ba91-45ed-9f9b-337652d034d0 service nova] Lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1468.792417] env[69992]: DEBUG nova.compute.manager [req-ac5afe38-2909-42dc-b91c-152be17698da req-26b02981-ba91-45ed-9f9b-337652d034d0 service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] No waiting events found dispatching network-vif-unplugged-4d2794aa-7eaf-404a-bf09-16cf3c357511 {{(pid=69992) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1468.792624] env[69992]: WARNING nova.compute.manager [req-ac5afe38-2909-42dc-b91c-152be17698da req-26b02981-ba91-45ed-9f9b-337652d034d0 service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Received unexpected event network-vif-unplugged-4d2794aa-7eaf-404a-bf09-16cf3c357511 for instance with vm_state shelved and task_state shelving_offloading. [ 1468.792791] env[69992]: DEBUG nova.compute.manager [req-ac5afe38-2909-42dc-b91c-152be17698da req-26b02981-ba91-45ed-9f9b-337652d034d0 service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Received event network-changed-4d2794aa-7eaf-404a-bf09-16cf3c357511 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1468.792947] env[69992]: DEBUG nova.compute.manager [req-ac5afe38-2909-42dc-b91c-152be17698da req-26b02981-ba91-45ed-9f9b-337652d034d0 service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Refreshing instance network info cache due to event network-changed-4d2794aa-7eaf-404a-bf09-16cf3c357511. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1468.793151] env[69992]: DEBUG oslo_concurrency.lockutils [req-ac5afe38-2909-42dc-b91c-152be17698da req-26b02981-ba91-45ed-9f9b-337652d034d0 service nova] Acquiring lock "refresh_cache-b7af455d-a3a7-480f-b778-9eb3724fa6f1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1468.793292] env[69992]: DEBUG oslo_concurrency.lockutils [req-ac5afe38-2909-42dc-b91c-152be17698da req-26b02981-ba91-45ed-9f9b-337652d034d0 service nova] Acquired lock "refresh_cache-b7af455d-a3a7-480f-b778-9eb3724fa6f1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1468.793452] env[69992]: DEBUG nova.network.neutron [req-ac5afe38-2909-42dc-b91c-152be17698da req-26b02981-ba91-45ed-9f9b-337652d034d0 service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Refreshing network info cache for port 4d2794aa-7eaf-404a-bf09-16cf3c357511 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1468.842693] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1468.898998] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523f4d81-60fd-2458-13fe-5587b3c73b23, 'name': SearchDatastore_Task, 'duration_secs': 0.020103} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.899341] env[69992]: DEBUG oslo_concurrency.lockutils [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1468.899590] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1468.899807] env[69992]: DEBUG oslo_concurrency.lockutils [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1468.915062] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5280cdd8-525c-e358-5d98-9d33fa0e3ac0, 'name': SearchDatastore_Task, 'duration_secs': 0.010322} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.915847] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b357d697-6b92-4c16-8947-7c10e20f92d7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.921275] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Waiting for the task: (returnval){ [ 1468.921275] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52115ad7-9d72-d6c3-2d95-e53815350d70" [ 1468.921275] env[69992]: _type = "Task" [ 1468.921275] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.931203] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52115ad7-9d72-d6c3-2d95-e53815350d70, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.049272] env[69992]: DEBUG nova.objects.instance [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lazy-loading 'numa_topology' on Instance uuid b7af455d-a3a7-480f-b778-9eb3724fa6f1 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1469.431623] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52115ad7-9d72-d6c3-2d95-e53815350d70, 'name': SearchDatastore_Task, 'duration_secs': 0.026736} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.431878] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1469.432161] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b/ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1469.432446] env[69992]: DEBUG oslo_concurrency.lockutils [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1469.432635] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1469.432853] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-20b11f66-3f0f-4cbc-b8a3-2e59bbc6e67e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.434790] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-02707f8e-c621-4f3a-aefc-458cfded89fd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.450020] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Waiting for the task: (returnval){ [ 1469.450020] env[69992]: value = "task-2898192" [ 1469.450020] env[69992]: _type = "Task" [ 1469.450020] env[69992]: } 
to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.452691] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1469.452883] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1469.454133] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e6f754c-487e-4cd1-bb8f-c474239ebc5e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.459893] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898192, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.464266] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1469.464266] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52f33c39-f645-0dfa-088b-caf3041e2cfc" [ 1469.464266] env[69992]: _type = "Task" [ 1469.464266] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.475883] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52f33c39-f645-0dfa-088b-caf3041e2cfc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.545390] env[69992]: DEBUG nova.network.neutron [req-ac5afe38-2909-42dc-b91c-152be17698da req-26b02981-ba91-45ed-9f9b-337652d034d0 service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Updated VIF entry in instance network info cache for port 4d2794aa-7eaf-404a-bf09-16cf3c357511. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1469.545783] env[69992]: DEBUG nova.network.neutron [req-ac5afe38-2909-42dc-b91c-152be17698da req-26b02981-ba91-45ed-9f9b-337652d034d0 service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Updating instance_info_cache with network_info: [{"id": "4d2794aa-7eaf-404a-bf09-16cf3c357511", "address": "fa:16:3e:61:a9:59", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap4d2794aa-7e", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1469.552020] env[69992]: DEBUG nova.objects.base [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1469.649338] env[69992]: DEBUG nova.compute.manager [req-542d05bb-2a54-4d6d-8475-bdf0ab03aa77 req-82ea7527-5c71-4377-bee9-52dca0b6827a service nova] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Received event network-changed-b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1469.649672] env[69992]: DEBUG nova.compute.manager [req-542d05bb-2a54-4d6d-8475-bdf0ab03aa77 req-82ea7527-5c71-4377-bee9-52dca0b6827a service nova] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Refreshing instance network info cache due to event network-changed-b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1469.649672] env[69992]: DEBUG oslo_concurrency.lockutils [req-542d05bb-2a54-4d6d-8475-bdf0ab03aa77 req-82ea7527-5c71-4377-bee9-52dca0b6827a service nova] Acquiring lock "refresh_cache-57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.649892] env[69992]: DEBUG oslo_concurrency.lockutils [req-542d05bb-2a54-4d6d-8475-bdf0ab03aa77 req-82ea7527-5c71-4377-bee9-52dca0b6827a service nova] Acquired lock "refresh_cache-57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1469.650155] env[69992]: DEBUG nova.network.neutron [req-542d05bb-2a54-4d6d-8475-bdf0ab03aa77 req-82ea7527-5c71-4377-bee9-52dca0b6827a service nova] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Refreshing network info cache for port b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1469.657076] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ea7bde-4b3d-4210-b9bf-7c81370ea18c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.666629] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a3d6e7-ae80-4d04-9427-c563d2ca4776 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.699479] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a6643c1-5b3c-455b-93aa-f108b07c520c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.709230] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806c5547-e6c8-477d-b255-cda9b66e49dc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.724998] env[69992]: DEBUG nova.compute.provider_tree [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1469.959319] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898192, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.974871] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52f33c39-f645-0dfa-088b-caf3041e2cfc, 'name': SearchDatastore_Task, 'duration_secs': 0.009966} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.975097] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5de7370-1ffa-471c-ae5b-668fbfccac13 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.980650] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1469.980650] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52f7dbb5-8ae0-3a4a-17e6-4697996e4c2a" [ 1469.980650] env[69992]: _type = "Task" [ 1469.980650] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.989044] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52f7dbb5-8ae0-3a4a-17e6-4697996e4c2a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.048685] env[69992]: DEBUG oslo_concurrency.lockutils [req-ac5afe38-2909-42dc-b91c-152be17698da req-26b02981-ba91-45ed-9f9b-337652d034d0 service nova] Releasing lock "refresh_cache-b7af455d-a3a7-480f-b778-9eb3724fa6f1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1470.228760] env[69992]: DEBUG nova.scheduler.client.report [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1470.235124] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1470.438265] env[69992]: DEBUG nova.network.neutron [req-542d05bb-2a54-4d6d-8475-bdf0ab03aa77 req-82ea7527-5c71-4377-bee9-52dca0b6827a service nova] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Updated VIF entry in instance network info cache for port b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1470.438637] env[69992]: DEBUG nova.network.neutron [req-542d05bb-2a54-4d6d-8475-bdf0ab03aa77 req-82ea7527-5c71-4377-bee9-52dca0b6827a service nova] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Updating instance_info_cache with network_info: [{"id": "b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8", "address": "fa:16:3e:f4:e6:7b", "network": {"id": "107a28a2-7647-4953-abac-1246b892511f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1382825212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ab89c6cf054418a4dd1a0e61b3a5e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb710419b-6f", "ovs_interfaceid": "b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1470.460127] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898192, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52074} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.460407] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b/ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1470.460624] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1470.460866] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bb1eed65-4729-4c68-be85-579fe9905df6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.467782] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Waiting for the task: (returnval){ [ 1470.467782] env[69992]: value = "task-2898193" [ 1470.467782] env[69992]: _type = "Task" [ 1470.467782] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.475339] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898193, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.490089] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52f7dbb5-8ae0-3a4a-17e6-4697996e4c2a, 'name': SearchDatastore_Task, 'duration_secs': 0.012542} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.490340] env[69992]: DEBUG oslo_concurrency.lockutils [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1470.490627] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 90facf1a-ae81-4259-bf75-94779267699c/90facf1a-ae81-4259-bf75-94779267699c.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1470.490889] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5403bbc9-e1a9-4231-acad-774e8914bff8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.496471] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1470.496471] env[69992]: value = "task-2898194" [ 1470.496471] env[69992]: _type = "Task" [ 1470.496471] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.503887] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898194, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.733451] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.188s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1470.735986] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.893s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1470.736814] env[69992]: DEBUG nova.objects.instance [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Lazy-loading 'resources' on Instance uuid d2b4482f-cc98-4e3d-9996-397f4f0b2ead {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1470.941280] env[69992]: DEBUG oslo_concurrency.lockutils [req-542d05bb-2a54-4d6d-8475-bdf0ab03aa77 req-82ea7527-5c71-4377-bee9-52dca0b6827a service nova] Releasing lock "refresh_cache-57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1470.977959] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898193, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064335} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.978240] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1470.979054] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46874e8-d312-4370-9744-f2e99d8d5724 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.999018] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b/ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1470.999355] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b676b5f9-35b3-4daa-9a68-1b052e7c8949 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.021825] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898194, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.023073] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Waiting for the task: (returnval){ [ 1471.023073] env[69992]: value = "task-2898195" [ 1471.023073] env[69992]: _type = "Task" [ 1471.023073] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.031893] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898195, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.245247] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1b2ff3b5-14c6-408d-8435-5c62bd5dea61 tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 23.722s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1471.246050] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.012s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1471.246292] env[69992]: INFO nova.compute.manager [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Unshelving [ 1471.366500] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ece565-9935-46ba-97a6-adb924eadc05 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.374573] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3375427d-0637-47b5-9551-e705b1ca1aef {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.406236] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dbbb514-86ba-4107-bd81-2c483edf6018 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.413767] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc9168fd-9d34-49fb-b337-8706e432f8c3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.428217] env[69992]: DEBUG nova.compute.provider_tree [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1471.512110] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898194, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.532596] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898195, 'name': ReconfigVM_Task, 'duration_secs': 0.336165} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.532979] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Reconfigured VM instance instance-00000078 to attach disk [datastore1] ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b/ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1471.533711] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-462859e6-34af-4d07-803d-44294ec792e2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.539504] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Waiting for the task: (returnval){ [ 1471.539504] env[69992]: value = "task-2898197" [ 1471.539504] env[69992]: _type = "Task" [ 1471.539504] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.548523] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898197, 'name': Rename_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.931064] env[69992]: DEBUG nova.scheduler.client.report [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1472.012604] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898194, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.14495} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.012890] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 90facf1a-ae81-4259-bf75-94779267699c/90facf1a-ae81-4259-bf75-94779267699c.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1472.013120] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1472.013374] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4e5e92d9-1c52-4b79-aed9-e4e7185f69b3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.021279] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1472.021279] env[69992]: value = "task-2898198" [ 1472.021279] env[69992]: _type = "Task" [ 1472.021279] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.031562] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898198, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.049055] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898197, 'name': Rename_Task, 'duration_secs': 0.139876} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.049340] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1472.049588] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44bd8383-8bde-4d9f-b3ad-54c9b3dcd012 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.056571] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Waiting for the task: (returnval){ [ 1472.056571] env[69992]: value = "task-2898199" [ 1472.056571] env[69992]: _type = "Task" [ 1472.056571] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.066654] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898199, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.271655] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1472.436237] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.700s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1472.438624] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.167s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1472.439048] env[69992]: DEBUG nova.objects.instance [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lazy-loading 'pci_requests' on Instance uuid b7af455d-a3a7-480f-b778-9eb3724fa6f1 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1472.456196] env[69992]: INFO nova.scheduler.client.report [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Deleted allocations for instance d2b4482f-cc98-4e3d-9996-397f4f0b2ead [ 1472.531854] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898198, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082681} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.532248] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1472.533325] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bcca3c8-8709-44a6-9a4d-c7d9e32588b1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.556568] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 90facf1a-ae81-4259-bf75-94779267699c/90facf1a-ae81-4259-bf75-94779267699c.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1472.556955] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-746f623f-c5fb-4772-82e0-201467d96e79 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.581739] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898199, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.583433] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1472.583433] env[69992]: value = "task-2898200" [ 1472.583433] env[69992]: _type = "Task" [ 1472.583433] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.591432] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898200, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.946239] env[69992]: DEBUG nova.objects.instance [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lazy-loading 'numa_topology' on Instance uuid b7af455d-a3a7-480f-b778-9eb3724fa6f1 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1472.964516] env[69992]: DEBUG oslo_concurrency.lockutils [None req-d05af6d1-f548-4013-8b42-cf1744435faf tempest-InstanceActionsNegativeTestJSON-1177223440 tempest-InstanceActionsNegativeTestJSON-1177223440-project-member] Lock "d2b4482f-cc98-4e3d-9996-397f4f0b2ead" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.529s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1473.066566] env[69992]: DEBUG oslo_vmware.api [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898199, 'name': PowerOnVM_Task, 'duration_secs': 0.526832} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.066878] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1473.067108] env[69992]: INFO nova.compute.manager [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Took 5.85 seconds to spawn the instance on the hypervisor. [ 1473.067297] env[69992]: DEBUG nova.compute.manager [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1473.068065] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87175b53-82d2-4c78-b8ee-cb15b9565800 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.092146] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898200, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.451322] env[69992]: INFO nova.compute.claims [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1473.598292] env[69992]: INFO nova.compute.manager [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Took 11.67 seconds to build instance. [ 1473.605490] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898200, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.943592] env[69992]: INFO nova.compute.manager [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Rebuilding instance [ 1473.993978] env[69992]: DEBUG nova.compute.manager [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1473.994898] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c2e4813-414c-4dd9-b0a2-d3a12bac82ea {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.099046] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898200, 'name': ReconfigVM_Task, 'duration_secs': 1.03053} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.099572] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 90facf1a-ae81-4259-bf75-94779267699c/90facf1a-ae81-4259-bf75-94779267699c.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1474.100732] env[69992]: DEBUG oslo_concurrency.lockutils [None req-86c61271-c26d-4ce6-a3af-a029016c3d37 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Lock "ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.202s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1474.101090] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-70abca76-caf0-42af-b3c2-375fcd7a67df {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.109530] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1474.109530] env[69992]: value = "task-2898202" [ 1474.109530] env[69992]: _type = "Task" [ 1474.109530] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.122845] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898202, 'name': Rename_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.557340] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13991a2e-c5c0-4216-be58-8fbea608c0d9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.565736] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc762174-af85-4523-bc54-a649be77e088 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.598123] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-621b972b-3c26-4425-bec2-2548036c6d07 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.605919] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a84568-fec6-47ac-a379-222513a574cd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.620326] env[69992]: DEBUG nova.compute.provider_tree [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1474.627697] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898202, 'name': Rename_Task, 'duration_secs': 0.193575} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.627697] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1474.627697] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d0bdd9d2-b7d0-4d00-8a89-49c642fa0feb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.632760] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1474.632760] env[69992]: value = "task-2898203" [ 1474.632760] env[69992]: _type = "Task" [ 1474.632760] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.640860] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898203, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.009913] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1475.010328] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7799ee49-42de-4938-8ec1-c3530a5db8a4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.018881] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Waiting for the task: (returnval){ [ 1475.018881] env[69992]: value = "task-2898204" [ 1475.018881] env[69992]: _type = "Task" [ 1475.018881] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.030503] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898204, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.124169] env[69992]: DEBUG nova.scheduler.client.report [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1475.145660] env[69992]: DEBUG oslo_vmware.api [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898203, 'name': PowerOnVM_Task, 'duration_secs': 0.429503} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.145921] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1475.146140] env[69992]: INFO nova.compute.manager [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Took 10.14 seconds to spawn the instance on the hypervisor. 
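[editor] The task records above (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task, PowerOffVM_Task) all follow the same oslo.vmware pattern that produces the wait_for_task / _poll_task lines in this log: invoke a vSphere method that returns a Task managed object, then poll it until vCenter reports success or failure. A minimal sketch of that pattern is below, assuming only the public VMwareAPISession, invoke_api and wait_for_task entry points; the host, credentials and VM reference are placeholders, not values taken from this log.

    # Minimal sketch of the oslo.vmware task-polling pattern seen in the
    # wait_for_task / _poll_task records above. Host, credentials and the
    # VM managed-object reference are placeholders, not values from this log.
    from oslo_vmware import api as vmware_api


    def power_on_vm(session, vm_ref):
        # Invoke a vSphere method that returns a Task managed object
        # (this is what produces the "Invoking VirtualMachine.PowerOnVM_Task"
        # records in the log)...
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # ...then block while oslo.vmware polls the task ("progress is N%")
        # until it completes; an exception is raised if vCenter reports the
        # task as failed.
        return session.wait_for_task(task)


    if __name__ == '__main__':
        session = vmware_api.VMwareAPISession(
            'vc.example.test',        # vCenter host (placeholder)
            'administrator@vsphere',  # username (placeholder)
            'secret',                 # password (placeholder)
            api_retry_count=10,       # retries for transient API faults
            task_poll_interval=0.5,   # seconds between poll cycles
        )
        # vm_ref would normally come from a PropertyCollector query of the
        # vCenter inventory; it is left out here because it depends on the
        # environment, so the call below stays commented.
        # power_on_vm(session, vm_ref)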
[ 1475.146337] env[69992]: DEBUG nova.compute.manager [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1475.147112] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-740c66dc-da5e-4136-bb7a-e431c528f882 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.529382] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898204, 'name': PowerOffVM_Task, 'duration_secs': 0.235325} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.529692] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1475.529927] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1475.530677] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045aad69-803e-4314-9ffe-70d8e8d48ad3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.537183] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1475.537397] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-60391065-89ca-4671-95af-555f6d040e20 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.571543] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1475.571837] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1475.572053] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Deleting the datastore file 
[datastore1] ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1475.572473] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00e68307-d397-4f9b-b80e-d0daadd624c6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.582168] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Waiting for the task: (returnval){ [ 1475.582168] env[69992]: value = "task-2898206" [ 1475.582168] env[69992]: _type = "Task" [ 1475.582168] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.590759] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898206, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.630033] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.191s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1475.665702] env[69992]: INFO nova.compute.manager [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Took 14.93 seconds to build instance. [ 1475.688013] env[69992]: INFO nova.network.neutron [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Updating port 4d2794aa-7eaf-404a-bf09-16cf3c357511 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1476.093259] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898206, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132022} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.093539] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1476.093748] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1476.093901] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1476.168090] env[69992]: DEBUG oslo_concurrency.lockutils [None req-88b6b882-5644-4ab2-8af4-b085716feca9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "90facf1a-ae81-4259-bf75-94779267699c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.442s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1476.409737] env[69992]: DEBUG nova.compute.manager [req-00d81356-1669-44d5-89e9-bcf0570e410f req-39090e0b-3f22-439d-84f5-df9b0519da7f service nova] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Received event network-changed-0ec50d92-4ea0-44af-b9fd-14443de36a12 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1476.411636] env[69992]: DEBUG nova.compute.manager [req-00d81356-1669-44d5-89e9-bcf0570e410f req-39090e0b-3f22-439d-84f5-df9b0519da7f service nova] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Refreshing instance network info cache due to event network-changed-0ec50d92-4ea0-44af-b9fd-14443de36a12. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1476.411880] env[69992]: DEBUG oslo_concurrency.lockutils [req-00d81356-1669-44d5-89e9-bcf0570e410f req-39090e0b-3f22-439d-84f5-df9b0519da7f service nova] Acquiring lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.412050] env[69992]: DEBUG oslo_concurrency.lockutils [req-00d81356-1669-44d5-89e9-bcf0570e410f req-39090e0b-3f22-439d-84f5-df9b0519da7f service nova] Acquired lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1476.412265] env[69992]: DEBUG nova.network.neutron [req-00d81356-1669-44d5-89e9-bcf0570e410f req-39090e0b-3f22-439d-84f5-df9b0519da7f service nova] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Refreshing network info cache for port 0ec50d92-4ea0-44af-b9fd-14443de36a12 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1477.118916] env[69992]: DEBUG nova.network.neutron [req-00d81356-1669-44d5-89e9-bcf0570e410f req-39090e0b-3f22-439d-84f5-df9b0519da7f service nova] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Updated VIF entry in instance network info cache for port 0ec50d92-4ea0-44af-b9fd-14443de36a12. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1477.119326] env[69992]: DEBUG nova.network.neutron [req-00d81356-1669-44d5-89e9-bcf0570e410f req-39090e0b-3f22-439d-84f5-df9b0519da7f service nova] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Updating instance_info_cache with network_info: [{"id": "0ec50d92-4ea0-44af-b9fd-14443de36a12", "address": "fa:16:3e:1a:4d:23", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ec50d92-4e", "ovs_interfaceid": "0ec50d92-4ea0-44af-b9fd-14443de36a12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1477.131895] env[69992]: DEBUG nova.virt.hardware [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1477.132172] env[69992]: DEBUG nova.virt.hardware [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1477.132338] env[69992]: DEBUG nova.virt.hardware [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1477.132528] env[69992]: DEBUG nova.virt.hardware [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1477.132677] env[69992]: DEBUG nova.virt.hardware [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1477.132850] env[69992]: DEBUG nova.virt.hardware [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1477.133118] env[69992]: DEBUG nova.virt.hardware [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1477.133206] env[69992]: DEBUG nova.virt.hardware [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1477.133379] env[69992]: DEBUG nova.virt.hardware [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1477.133543] env[69992]: DEBUG nova.virt.hardware [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d 
tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1477.133714] env[69992]: DEBUG nova.virt.hardware [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1477.134641] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764e10b3-4ee3-433b-92c9-99310e3ef1f8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.144182] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d8c633-7cfc-4e83-a318-a242d6192c29 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.158688] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Instance VIF info [] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1477.164966] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1477.165230] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1477.165445] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ec252831-8390-44ce-ac86-c5e92a6d1b5a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.184614] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1477.184614] env[69992]: value = "task-2898208" [ 1477.184614] env[69992]: _type = "Task" [ 1477.184614] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.192687] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898208, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.240013] env[69992]: DEBUG nova.compute.manager [req-fec74f29-9fd2-4385-abab-56ffefd3a18c req-92b9973a-8362-4ff0-8c1f-39e742e6fa6e service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Received event network-vif-plugged-4d2794aa-7eaf-404a-bf09-16cf3c357511 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1477.240249] env[69992]: DEBUG oslo_concurrency.lockutils [req-fec74f29-9fd2-4385-abab-56ffefd3a18c req-92b9973a-8362-4ff0-8c1f-39e742e6fa6e service nova] Acquiring lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1477.240551] env[69992]: DEBUG oslo_concurrency.lockutils [req-fec74f29-9fd2-4385-abab-56ffefd3a18c req-92b9973a-8362-4ff0-8c1f-39e742e6fa6e service nova] Lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1477.240773] env[69992]: DEBUG oslo_concurrency.lockutils [req-fec74f29-9fd2-4385-abab-56ffefd3a18c req-92b9973a-8362-4ff0-8c1f-39e742e6fa6e service nova] Lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1477.240976] env[69992]: DEBUG nova.compute.manager [req-fec74f29-9fd2-4385-abab-56ffefd3a18c req-92b9973a-8362-4ff0-8c1f-39e742e6fa6e service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] No waiting events found dispatching network-vif-plugged-4d2794aa-7eaf-404a-bf09-16cf3c357511 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1477.241166] env[69992]: WARNING nova.compute.manager [req-fec74f29-9fd2-4385-abab-56ffefd3a18c req-92b9973a-8362-4ff0-8c1f-39e742e6fa6e service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Received unexpected event network-vif-plugged-4d2794aa-7eaf-404a-bf09-16cf3c357511 for instance with vm_state shelved_offloaded and task_state spawning. 
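The recurring "Waiting for the task: (returnval){...}" / "progress is 0%" / "completed successfully" entries in this stretch (PowerOffVM_Task, DeleteDatastoreFile_Task, CreateVM_Task, SearchDatastore_Task) come from polling vCenter tasks until they reach a terminal state. A minimal, self-contained sketch of that poll-until-done pattern is below; get_task_info and the shape of its return value are hypothetical stand-ins, not the actual oslo.vmware API.

    import time

    class TaskTimeout(Exception):
        """Raised when a vCenter-style task does not finish in time."""

    def get_task_info(task_ref):
        # Hypothetical helper: a real driver would ask the vSphere API for the
        # task's current state ('running' / 'success' / 'error'), progress and result.
        raise NotImplementedError

    def wait_for_task(task_ref, poll_interval=0.5, timeout=300):
        """Poll a task reference until it reaches a terminal state.

        Mirrors the log pattern above: report progress while the task is
        running, return its result on success, raise on error or timeout.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_ref)
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                raise RuntimeError(info.get("error", "task failed"))
            print("Task %s: progress is %s%%" % (task_ref, info.get("progress", 0)))
            time.sleep(poll_interval)
        raise TaskTimeout("Task %s did not complete within %ss" % (task_ref, timeout))
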
[ 1477.332009] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "refresh_cache-b7af455d-a3a7-480f-b778-9eb3724fa6f1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.332247] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired lock "refresh_cache-b7af455d-a3a7-480f-b778-9eb3724fa6f1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1477.332397] env[69992]: DEBUG nova.network.neutron [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1477.623152] env[69992]: DEBUG oslo_concurrency.lockutils [req-00d81356-1669-44d5-89e9-bcf0570e410f req-39090e0b-3f22-439d-84f5-df9b0519da7f service nova] Releasing lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1477.695099] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898208, 'name': CreateVM_Task, 'duration_secs': 0.279313} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.695313] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1477.695746] env[69992]: DEBUG oslo_concurrency.lockutils [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.695907] env[69992]: DEBUG oslo_concurrency.lockutils [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1477.696255] env[69992]: DEBUG oslo_concurrency.lockutils [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1477.696594] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ced0e3fb-643a-4fae-a0d4-656b4dbbfe71 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.702903] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 
tempest-ServerShowV254Test-1767743013-project-member] Waiting for the task: (returnval){ [ 1477.702903] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a1dfc4-41c7-c6b9-6b6d-5ed4d0d5e48e" [ 1477.702903] env[69992]: _type = "Task" [ 1477.702903] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.709954] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a1dfc4-41c7-c6b9-6b6d-5ed4d0d5e48e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.068328] env[69992]: DEBUG nova.network.neutron [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Updating instance_info_cache with network_info: [{"id": "4d2794aa-7eaf-404a-bf09-16cf3c357511", "address": "fa:16:3e:61:a9:59", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d2794aa-7e", "ovs_interfaceid": "4d2794aa-7eaf-404a-bf09-16cf3c357511", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1478.213174] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a1dfc4-41c7-c6b9-6b6d-5ed4d0d5e48e, 'name': SearchDatastore_Task, 'duration_secs': 0.011507} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.213555] env[69992]: DEBUG oslo_concurrency.lockutils [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1478.213707] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1478.213945] env[69992]: DEBUG oslo_concurrency.lockutils [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.214110] env[69992]: DEBUG oslo_concurrency.lockutils [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1478.214324] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1478.214609] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9efd4063-8e46-4727-979b-3e99e8132aa6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.229030] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1478.229320] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1478.230428] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d9b6862-2893-42cd-bb13-3d0b0ed22e51 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.238184] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Waiting for the task: (returnval){ [ 1478.238184] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52eb5c99-39a6-01d0-5289-98637a4d4b09" [ 1478.238184] env[69992]: _type = "Task" [ 1478.238184] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.252359] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52eb5c99-39a6-01d0-5289-98637a4d4b09, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.571092] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Releasing lock "refresh_cache-b7af455d-a3a7-480f-b778-9eb3724fa6f1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1478.595129] env[69992]: DEBUG nova.virt.hardware [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='54e3ed2f4f921c25b88a6ea3487eaeb8',container_format='bare',created_at=2025-03-10T17:55:55Z,direct_url=,disk_format='vmdk',id=3837388a-a31b-4d6b-97e6-ea9f24ecc066,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-9798354-shelved',owner='ca458056b0794b08b812f0a4106a448c',properties=ImageMetaProps,protected=,size=31669248,status='active',tags=,updated_at=2025-03-10T17:56:11Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1478.595575] env[69992]: DEBUG nova.virt.hardware [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1478.595842] env[69992]: DEBUG nova.virt.hardware [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1478.596177] env[69992]: DEBUG nova.virt.hardware [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c 
tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1478.596428] env[69992]: DEBUG nova.virt.hardware [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1478.596691] env[69992]: DEBUG nova.virt.hardware [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1478.597338] env[69992]: DEBUG nova.virt.hardware [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1478.597338] env[69992]: DEBUG nova.virt.hardware [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1478.597574] env[69992]: DEBUG nova.virt.hardware [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1478.597767] env[69992]: DEBUG nova.virt.hardware [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1478.597946] env[69992]: DEBUG nova.virt.hardware [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1478.599122] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e732da65-bb58-43ee-adba-00998918108c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.607823] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da06ee37-3766-4363-93e0-ba8bc5eff826 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.622388] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:a9:59', 'network_ref': 
{'type': 'OpaqueNetwork', 'network-id': '0f6d1427-d86b-4371-9172-50e4bb0eb1cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d2794aa-7eaf-404a-bf09-16cf3c357511', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1478.629731] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1478.630009] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1478.630234] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0966a6e-f1ed-4998-a2cd-76485f4f65ad {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.651017] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1478.651017] env[69992]: value = "task-2898210" [ 1478.651017] env[69992]: _type = "Task" [ 1478.651017] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.658952] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898210, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.750029] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52eb5c99-39a6-01d0-5289-98637a4d4b09, 'name': SearchDatastore_Task, 'duration_secs': 0.029136} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.750877] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f85b568a-0b38-4c9c-82c8-41f1c6242a96 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.757459] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Waiting for the task: (returnval){ [ 1478.757459] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]525fcb8f-aba3-13de-c16f-4a51d54d0353" [ 1478.757459] env[69992]: _type = "Task" [ 1478.757459] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.765721] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525fcb8f-aba3-13de-c16f-4a51d54d0353, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.162017] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898210, 'name': CreateVM_Task, 'duration_secs': 0.368751} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.162202] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1479.162823] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3837388a-a31b-4d6b-97e6-ea9f24ecc066" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.162992] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3837388a-a31b-4d6b-97e6-ea9f24ecc066" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1479.163377] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/3837388a-a31b-4d6b-97e6-ea9f24ecc066" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1479.163638] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d4787a9-6c5a-46e2-a752-8f8d847a2c82 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.168866] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1479.168866] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5224d03c-0e47-7e15-9d2b-e0b4ce784d97" [ 1479.168866] env[69992]: _type = "Task" [ 1479.168866] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.177540] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5224d03c-0e47-7e15-9d2b-e0b4ce784d97, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.268030] env[69992]: DEBUG nova.compute.manager [req-925d71fc-505c-4d66-a033-c3e29fc2b059 req-299e9345-6b74-4b11-8dbf-05b1abcacde0 service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Received event network-changed-4d2794aa-7eaf-404a-bf09-16cf3c357511 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1479.268313] env[69992]: DEBUG nova.compute.manager [req-925d71fc-505c-4d66-a033-c3e29fc2b059 req-299e9345-6b74-4b11-8dbf-05b1abcacde0 service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Refreshing instance network info cache due to event network-changed-4d2794aa-7eaf-404a-bf09-16cf3c357511. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1479.268540] env[69992]: DEBUG oslo_concurrency.lockutils [req-925d71fc-505c-4d66-a033-c3e29fc2b059 req-299e9345-6b74-4b11-8dbf-05b1abcacde0 service nova] Acquiring lock "refresh_cache-b7af455d-a3a7-480f-b778-9eb3724fa6f1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.268739] env[69992]: DEBUG oslo_concurrency.lockutils [req-925d71fc-505c-4d66-a033-c3e29fc2b059 req-299e9345-6b74-4b11-8dbf-05b1abcacde0 service nova] Acquired lock "refresh_cache-b7af455d-a3a7-480f-b778-9eb3724fa6f1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1479.268739] env[69992]: DEBUG nova.network.neutron [req-925d71fc-505c-4d66-a033-c3e29fc2b059 req-299e9345-6b74-4b11-8dbf-05b1abcacde0 service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Refreshing network info cache for port 4d2794aa-7eaf-404a-bf09-16cf3c357511 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1479.274149] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525fcb8f-aba3-13de-c16f-4a51d54d0353, 'name': SearchDatastore_Task, 'duration_secs': 0.019687} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.274676] env[69992]: DEBUG oslo_concurrency.lockutils [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1479.274930] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b/ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1479.275212] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-37aafad0-03cd-4382-96a6-ae75ed342319 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.285205] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Waiting for the task: (returnval){ [ 1479.285205] env[69992]: value = "task-2898211" [ 1479.285205] env[69992]: _type = "Task" [ 1479.285205] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.294604] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898211, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.682064] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3837388a-a31b-4d6b-97e6-ea9f24ecc066" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1479.682384] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Processing image 3837388a-a31b-4d6b-97e6-ea9f24ecc066 {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1479.682659] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/3837388a-a31b-4d6b-97e6-ea9f24ecc066/3837388a-a31b-4d6b-97e6-ea9f24ecc066.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.682856] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquired lock "[datastore2] devstack-image-cache_base/3837388a-a31b-4d6b-97e6-ea9f24ecc066/3837388a-a31b-4d6b-97e6-ea9f24ecc066.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1479.683098] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1479.683408] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b87fa1c-2987-4c96-ae91-fd887e41669a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.700772] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1479.700972] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1479.701985] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-834572e8-0463-44da-95a6-c3850af91a69 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.711135] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1479.711135] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]529d4c4b-bb71-8fd1-a59c-238f306c290e" [ 1479.711135] env[69992]: _type = "Task" [ 1479.711135] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.721108] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]529d4c4b-bb71-8fd1-a59c-238f306c290e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.797783] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898211, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.021362] env[69992]: DEBUG nova.network.neutron [req-925d71fc-505c-4d66-a033-c3e29fc2b059 req-299e9345-6b74-4b11-8dbf-05b1abcacde0 service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Updated VIF entry in instance network info cache for port 4d2794aa-7eaf-404a-bf09-16cf3c357511. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1480.021738] env[69992]: DEBUG nova.network.neutron [req-925d71fc-505c-4d66-a033-c3e29fc2b059 req-299e9345-6b74-4b11-8dbf-05b1abcacde0 service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Updating instance_info_cache with network_info: [{"id": "4d2794aa-7eaf-404a-bf09-16cf3c357511", "address": "fa:16:3e:61:a9:59", "network": {"id": "5252c1dc-ca51-4cca-a7e7-bbe1795957d1", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1689852635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca458056b0794b08b812f0a4106a448c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d2794aa-7e", "ovs_interfaceid": "4d2794aa-7eaf-404a-bf09-16cf3c357511", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.223422] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Preparing fetch location {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1480.223671] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Fetch image to [datastore2] OSTACK_IMG_c31f7d37-e90e-47b0-86c9-f229827e1034/OSTACK_IMG_c31f7d37-e90e-47b0-86c9-f229827e1034.vmdk {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1480.223795] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Downloading stream optimized image 3837388a-a31b-4d6b-97e6-ea9f24ecc066 to [datastore2] OSTACK_IMG_c31f7d37-e90e-47b0-86c9-f229827e1034/OSTACK_IMG_c31f7d37-e90e-47b0-86c9-f229827e1034.vmdk on the data store datastore2 as vApp {{(pid=69992) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1480.224077] env[69992]: DEBUG nova.virt.vmwareapi.images [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Downloading image file data 3837388a-a31b-4d6b-97e6-ea9f24ecc066 to the ESX as VM named 'OSTACK_IMG_c31f7d37-e90e-47b0-86c9-f229827e1034' {{(pid=69992) fetch_image_stream_optimized 
/opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1480.291410] env[69992]: DEBUG oslo_vmware.rw_handles [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1480.291410] env[69992]: value = "resgroup-9" [ 1480.291410] env[69992]: _type = "ResourcePool" [ 1480.291410] env[69992]: }. {{(pid=69992) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1480.292058] env[69992]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-7e3c3e0c-7081-4489-8660-d8ddaa4f1fdd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.310178] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898211, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.719594} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.310834] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b/ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1480.311099] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1480.311373] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dd07abc7-dc63-48ab-993a-f6bae42db961 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.316439] env[69992]: DEBUG oslo_vmware.rw_handles [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lease: (returnval){ [ 1480.316439] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528f0413-ca66-6807-6978-19d8e913c121" [ 1480.316439] env[69992]: _type = "HttpNfcLease" [ 1480.316439] env[69992]: } obtained for vApp import into resource pool (val){ [ 1480.316439] env[69992]: value = "resgroup-9" [ 1480.316439] env[69992]: _type = "ResourcePool" [ 1480.316439] env[69992]: }. 
{{(pid=69992) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1480.316790] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the lease: (returnval){ [ 1480.316790] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528f0413-ca66-6807-6978-19d8e913c121" [ 1480.316790] env[69992]: _type = "HttpNfcLease" [ 1480.316790] env[69992]: } to be ready. {{(pid=69992) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1480.320498] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Waiting for the task: (returnval){ [ 1480.320498] env[69992]: value = "task-2898213" [ 1480.320498] env[69992]: _type = "Task" [ 1480.320498] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.325992] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1480.325992] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528f0413-ca66-6807-6978-19d8e913c121" [ 1480.325992] env[69992]: _type = "HttpNfcLease" [ 1480.325992] env[69992]: } is initializing. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1480.331859] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898213, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.524882] env[69992]: DEBUG oslo_concurrency.lockutils [req-925d71fc-505c-4d66-a033-c3e29fc2b059 req-299e9345-6b74-4b11-8dbf-05b1abcacde0 service nova] Releasing lock "refresh_cache-b7af455d-a3a7-480f-b778-9eb3724fa6f1" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1480.825950] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1480.825950] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528f0413-ca66-6807-6978-19d8e913c121" [ 1480.825950] env[69992]: _type = "HttpNfcLease" [ 1480.825950] env[69992]: } is initializing. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1480.831230] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898213, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069599} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.831529] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1480.832328] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e92f63-5b88-4389-adeb-47abfb43fc87 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.852377] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b/ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1480.852621] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5de27164-cbb0-4286-af7a-b868c436d81e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.874879] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Waiting for the task: (returnval){ [ 1480.874879] env[69992]: value = "task-2898214" [ 1480.874879] env[69992]: _type = "Task" [ 1480.874879] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.888500] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898214, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.325706] env[69992]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1481.325706] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528f0413-ca66-6807-6978-19d8e913c121" [ 1481.325706] env[69992]: _type = "HttpNfcLease" [ 1481.325706] env[69992]: } is ready. {{(pid=69992) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1481.326246] env[69992]: DEBUG oslo_vmware.rw_handles [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1481.326246] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528f0413-ca66-6807-6978-19d8e913c121" [ 1481.326246] env[69992]: _type = "HttpNfcLease" [ 1481.326246] env[69992]: }. 
{{(pid=69992) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1481.326854] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2834359d-ba48-406f-962e-0a056e139ec6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.334059] env[69992]: DEBUG oslo_vmware.rw_handles [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523c52d2-9a3c-242d-ce27-59dbb30076d0/disk-0.vmdk from lease info. {{(pid=69992) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1481.334241] env[69992]: DEBUG oslo_vmware.rw_handles [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Creating HTTP connection to write to file with size = 31669248 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523c52d2-9a3c-242d-ce27-59dbb30076d0/disk-0.vmdk. {{(pid=69992) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1481.401212] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-38caad0c-ada5-404f-9e41-650744f61a44 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.402988] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898214, 'name': ReconfigVM_Task, 'duration_secs': 0.309989} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.403342] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Reconfigured VM instance instance-00000078 to attach disk [datastore2] ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b/ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1481.404160] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-50187560-1d76-427d-ab49-4052fbac3c17 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.412043] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Waiting for the task: (returnval){ [ 1481.412043] env[69992]: value = "task-2898215" [ 1481.412043] env[69992]: _type = "Task" [ 1481.412043] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.419526] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898215, 'name': Rename_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.925100] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898215, 'name': Rename_Task, 'duration_secs': 0.139845} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.926081] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1481.926351] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7c996d5f-22ca-4237-9bb7-96cbca565f7a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.933746] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Waiting for the task: (returnval){ [ 1481.933746] env[69992]: value = "task-2898217" [ 1481.933746] env[69992]: _type = "Task" [ 1481.933746] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.942353] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898217, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.444719] env[69992]: DEBUG oslo_vmware.api [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898217, 'name': PowerOnVM_Task, 'duration_secs': 0.429957} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.445093] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1482.445093] env[69992]: DEBUG nova.compute.manager [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1482.445919] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b605fa49-22ec-4f4e-8a3b-80107bb99a76 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.644561] env[69992]: DEBUG oslo_vmware.rw_handles [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Completed reading data from the image iterator. 
{{(pid=69992) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1482.644790] env[69992]: DEBUG oslo_vmware.rw_handles [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523c52d2-9a3c-242d-ce27-59dbb30076d0/disk-0.vmdk. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1482.645746] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6da9e6a-928f-4763-bcdb-fdd3d752f88f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.653348] env[69992]: DEBUG oslo_vmware.rw_handles [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523c52d2-9a3c-242d-ce27-59dbb30076d0/disk-0.vmdk is in state: ready. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1482.653553] env[69992]: DEBUG oslo_vmware.rw_handles [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523c52d2-9a3c-242d-ce27-59dbb30076d0/disk-0.vmdk. {{(pid=69992) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1482.653806] env[69992]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-182216c1-0384-46b3-82c9-99ee1aa5663f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.968975] env[69992]: DEBUG oslo_concurrency.lockutils [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1482.969299] env[69992]: DEBUG oslo_concurrency.lockutils [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1482.969433] env[69992]: DEBUG nova.objects.instance [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69992) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1483.046705] env[69992]: DEBUG oslo_vmware.rw_handles [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523c52d2-9a3c-242d-ce27-59dbb30076d0/disk-0.vmdk. 
{{(pid=69992) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1483.046966] env[69992]: INFO nova.virt.vmwareapi.images [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Downloaded image file data 3837388a-a31b-4d6b-97e6-ea9f24ecc066 [ 1483.047789] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2b9ef9-5843-4915-8b3d-bdd030ddae05 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.063935] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce323d28-dc08-43b9-b2e3-ec79adf44ff9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.127933] env[69992]: INFO nova.virt.vmwareapi.images [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] The imported VM was unregistered [ 1483.130861] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Caching image {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1483.131124] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Creating directory with path [datastore2] devstack-image-cache_base/3837388a-a31b-4d6b-97e6-ea9f24ecc066 {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1483.131771] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-652001f4-ff54-4816-9ce6-036d0689ec41 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.165528] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Created directory with path [datastore2] devstack-image-cache_base/3837388a-a31b-4d6b-97e6-ea9f24ecc066 {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1483.165749] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_c31f7d37-e90e-47b0-86c9-f229827e1034/OSTACK_IMG_c31f7d37-e90e-47b0-86c9-f229827e1034.vmdk to [datastore2] devstack-image-cache_base/3837388a-a31b-4d6b-97e6-ea9f24ecc066/3837388a-a31b-4d6b-97e6-ea9f24ecc066.vmdk. 
{{(pid=69992) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1483.166016] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-b33d8b16-eb16-4c74-9847-f5e61e9d9a1f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.174647] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1483.174647] env[69992]: value = "task-2898219" [ 1483.174647] env[69992]: _type = "Task" [ 1483.174647] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.182792] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898219, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.687395] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898219, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.735210] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Acquiring lock "ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1483.735540] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Lock "ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1483.735791] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Acquiring lock "ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1483.735993] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Lock "ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1483.736194] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 
tempest-ServerShowV254Test-1767743013-project-member] Lock "ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1483.738578] env[69992]: INFO nova.compute.manager [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Terminating instance [ 1483.978710] env[69992]: DEBUG oslo_concurrency.lockutils [None req-326c0b26-53b7-40ba-b0fc-03718ae0ad8d tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1484.188537] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898219, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.243179] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Acquiring lock "refresh_cache-ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1484.243358] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Acquired lock "refresh_cache-ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1484.243555] env[69992]: DEBUG nova.network.neutron [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1484.689767] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898219, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.764410] env[69992]: DEBUG nova.network.neutron [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1484.823548] env[69992]: DEBUG nova.network.neutron [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1485.188904] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898219, 'name': MoveVirtualDisk_Task} progress is 88%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.326656] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Releasing lock "refresh_cache-ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1485.327156] env[69992]: DEBUG nova.compute.manager [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1485.327365] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1485.328276] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e474906-3615-4d08-acd3-4fb4e311871f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.338934] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1485.339287] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9be150fc-7efd-4d7c-a025-f15b2990311f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.351262] env[69992]: DEBUG oslo_vmware.api [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Waiting for the task: (returnval){ [ 1485.351262] env[69992]: value = "task-2898221" [ 1485.351262] env[69992]: _type = "Task" [ 1485.351262] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.363451] env[69992]: DEBUG oslo_vmware.api [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898221, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.687818] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898219, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.331042} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.687950] env[69992]: INFO nova.virt.vmwareapi.ds_util [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_c31f7d37-e90e-47b0-86c9-f229827e1034/OSTACK_IMG_c31f7d37-e90e-47b0-86c9-f229827e1034.vmdk to [datastore2] devstack-image-cache_base/3837388a-a31b-4d6b-97e6-ea9f24ecc066/3837388a-a31b-4d6b-97e6-ea9f24ecc066.vmdk. [ 1485.688040] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Cleaning up location [datastore2] OSTACK_IMG_c31f7d37-e90e-47b0-86c9-f229827e1034 {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1485.688211] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_c31f7d37-e90e-47b0-86c9-f229827e1034 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1485.688464] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7646c4e1-090b-4706-b4d1-1e24e2445257 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.695773] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1485.695773] env[69992]: value = "task-2898222" [ 1485.695773] env[69992]: _type = "Task" [ 1485.695773] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.703858] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898222, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.860724] env[69992]: DEBUG oslo_vmware.api [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898221, 'name': PowerOffVM_Task, 'duration_secs': 0.170481} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.860984] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1485.861167] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1485.861412] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-abaa25a8-98de-4843-ad86-ffb50b312fd9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.888174] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1485.888403] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1485.888589] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Deleting the datastore file [datastore2] ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1485.888848] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e5f9fee8-0bf5-4d2e-852b-d0c26772ad29 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.896539] env[69992]: DEBUG oslo_vmware.api [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Waiting for the task: (returnval){ [ 1485.896539] env[69992]: value = "task-2898224" [ 1485.896539] env[69992]: _type = "Task" [ 1485.896539] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.906673] env[69992]: DEBUG oslo_vmware.api [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898224, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.206659] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898222, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.088384} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.206818] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1486.206930] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Releasing lock "[datastore2] devstack-image-cache_base/3837388a-a31b-4d6b-97e6-ea9f24ecc066/3837388a-a31b-4d6b-97e6-ea9f24ecc066.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1486.207193] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/3837388a-a31b-4d6b-97e6-ea9f24ecc066/3837388a-a31b-4d6b-97e6-ea9f24ecc066.vmdk to [datastore2] b7af455d-a3a7-480f-b778-9eb3724fa6f1/b7af455d-a3a7-480f-b778-9eb3724fa6f1.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1486.207452] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-88c83382-9acf-4ca0-bac9-f34797f24c61 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.215713] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1486.215713] env[69992]: value = "task-2898226" [ 1486.215713] env[69992]: _type = "Task" [ 1486.215713] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.224840] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898226, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.407958] env[69992]: DEBUG oslo_vmware.api [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Task: {'id': task-2898224, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.092318} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.408237] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1486.408427] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1486.408607] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1486.408783] env[69992]: INFO nova.compute.manager [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1486.409038] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1486.409239] env[69992]: DEBUG nova.compute.manager [-] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1486.409337] env[69992]: DEBUG nova.network.neutron [-] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1486.425997] env[69992]: DEBUG nova.network.neutron [-] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1486.483278] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5cdbd12c-7de6-4d30-a3aa-69d4592cc284 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "5df7d031-66bf-43eb-a05b-07b6cff9db59" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1486.483606] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5cdbd12c-7de6-4d30-a3aa-69d4592cc284 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "5df7d031-66bf-43eb-a05b-07b6cff9db59" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1486.727632] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898226, 'name': CopyVirtualDisk_Task} progress is 12%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.928786] env[69992]: DEBUG nova.network.neutron [-] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1486.986878] env[69992]: DEBUG nova.compute.utils [None req-5cdbd12c-7de6-4d30-a3aa-69d4592cc284 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1487.229551] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898226, 'name': CopyVirtualDisk_Task} progress is 35%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.431997] env[69992]: INFO nova.compute.manager [-] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Took 1.02 seconds to deallocate network for instance. [ 1487.489729] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5cdbd12c-7de6-4d30-a3aa-69d4592cc284 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "5df7d031-66bf-43eb-a05b-07b6cff9db59" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1487.729409] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898226, 'name': CopyVirtualDisk_Task} progress is 57%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.938818] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1487.939149] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1487.939432] env[69992]: DEBUG nova.objects.instance [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Lazy-loading 'resources' on Instance uuid ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1488.230955] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898226, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.534657] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed88405d-e663-41aa-948f-670b51d2fb86 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.545353] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c07001-ed89-4ea1-9208-5ee6a1e9bec3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.580479] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5cdbd12c-7de6-4d30-a3aa-69d4592cc284 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "5df7d031-66bf-43eb-a05b-07b6cff9db59" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1488.580761] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5cdbd12c-7de6-4d30-a3aa-69d4592cc284 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "5df7d031-66bf-43eb-a05b-07b6cff9db59" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1488.580992] env[69992]: INFO nova.compute.manager [None req-5cdbd12c-7de6-4d30-a3aa-69d4592cc284 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Attaching volume cef2194f-8e69-465e-be8f-2a99094862a4 to /dev/sdb [ 1488.586426] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8ba92108-0c15-4537-837a-fcb4987d2198 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.594145] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c50930-9947-4c9e-9c41-1b28d53bad38 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.610623] env[69992]: DEBUG nova.compute.provider_tree [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1488.616949] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6624113-8116-4f2f-b444-0376fbd84838 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.626915] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c7a27cb-75b9-4d66-b807-ae5c634ecb3c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.643246] env[69992]: DEBUG nova.virt.block_device [None req-5cdbd12c-7de6-4d30-a3aa-69d4592cc284 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Updating existing volume attachment record: ced20141-bdb4-400e-9c96-0abe807c3d40 {{(pid=69992) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1488.730313] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898226, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.474349} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.730594] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/3837388a-a31b-4d6b-97e6-ea9f24ecc066/3837388a-a31b-4d6b-97e6-ea9f24ecc066.vmdk to [datastore2] b7af455d-a3a7-480f-b778-9eb3724fa6f1/b7af455d-a3a7-480f-b778-9eb3724fa6f1.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1488.731398] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64c5398-418d-4d28-ad05-3607b5842fc7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.755779] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] b7af455d-a3a7-480f-b778-9eb3724fa6f1/b7af455d-a3a7-480f-b778-9eb3724fa6f1.vmdk or device None with type streamOptimized {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1488.757126] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-137da813-6306-468d-be5c-d52d37c6adfe {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.778121] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1488.778121] env[69992]: value = "task-2898227" [ 1488.778121] env[69992]: _type = "Task" [ 1488.778121] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.786338] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898227, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.019099] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "7b549cd4-bfdc-45c5-9031-9b378ad7ee79" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1489.019359] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "7b549cd4-bfdc-45c5-9031-9b378ad7ee79" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1489.114190] env[69992]: DEBUG nova.scheduler.client.report [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1489.289134] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898227, 'name': ReconfigVM_Task, 'duration_secs': 0.288949} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.289459] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Reconfigured VM instance instance-0000006f to attach disk [datastore2] b7af455d-a3a7-480f-b778-9eb3724fa6f1/b7af455d-a3a7-480f-b778-9eb3724fa6f1.vmdk or device None with type streamOptimized {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1489.290157] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3eb2dd2a-bbd0-4f36-915d-cd1da1b01166 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.298406] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1489.298406] env[69992]: value = "task-2898229" [ 1489.298406] env[69992]: _type = "Task" [ 1489.298406] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.309368] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898229, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.522092] env[69992]: DEBUG nova.compute.manager [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1489.619332] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.680s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1489.647997] env[69992]: INFO nova.scheduler.client.report [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Deleted allocations for instance ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b [ 1489.808780] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898229, 'name': Rename_Task, 'duration_secs': 0.149217} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.809127] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1489.809257] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-826d689e-3627-4c49-aecb-8b5c52b27d02 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.816710] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1489.816710] env[69992]: value = "task-2898230" [ 1489.816710] env[69992]: _type = "Task" [ 1489.816710] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.827333] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898230, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.041735] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1490.042017] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1490.043582] env[69992]: INFO nova.compute.claims [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1490.155921] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6fb9f923-7622-4aeb-9315-ddb2907e0a36 tempest-ServerShowV254Test-1767743013 tempest-ServerShowV254Test-1767743013-project-member] Lock "ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.420s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1490.327625] env[69992]: DEBUG oslo_vmware.api [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898230, 'name': PowerOnVM_Task, 'duration_secs': 0.484501} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.327889] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1490.424946] env[69992]: DEBUG nova.compute.manager [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1490.425927] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eef2b3e-b371-4c2f-955f-11463b14c1d6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.943790] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f13b0018-a62f-4029-9a8e-e8f5cd08169c tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.697s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1491.150507] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c5bbc12-aa7e-46eb-9683-8f237219f422 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.161607] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ba0f70-ecd2-4c9d-9071-f1168449d536 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.213311] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1579f4a9-3f9f-48e3-9879-4d83048007ff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.225204] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ba67c9-5477-47ae-a8e2-99ab9a456ff3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.246765] env[69992]: DEBUG nova.compute.provider_tree [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1491.390863] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1491.391134] env[69992]: DEBUG oslo_concurrency.lockutils [None 
req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1491.391341] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1491.391524] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1491.391695] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1491.393775] env[69992]: INFO nova.compute.manager [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Terminating instance [ 1491.750548] env[69992]: DEBUG nova.scheduler.client.report [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1491.897061] env[69992]: DEBUG nova.compute.manager [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1491.897258] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1491.898139] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715f9d40-09a4-40c3-b186-3f6c72d6f545 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.905660] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1491.905896] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2017d589-5cae-4be5-b429-7919a4e8fa73 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.911965] env[69992]: DEBUG oslo_vmware.api [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1491.911965] env[69992]: value = "task-2898232" [ 1491.911965] env[69992]: _type = "Task" [ 1491.911965] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.920921] env[69992]: DEBUG oslo_vmware.api [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898232, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.254918] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.213s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1492.255324] env[69992]: DEBUG nova.compute.manager [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1492.422208] env[69992]: DEBUG oslo_vmware.api [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898232, 'name': PowerOffVM_Task, 'duration_secs': 0.198118} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.422478] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1492.422653] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1492.422903] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff0f4f76-9d0f-4d61-820f-4ae37b459e0e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.487108] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1492.487332] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1492.487515] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Deleting the datastore file [datastore2] b7af455d-a3a7-480f-b778-9eb3724fa6f1 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1492.487780] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4753ccfa-839f-4dd9-91b9-105125b53c50 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.493862] env[69992]: DEBUG oslo_vmware.api [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for the task: (returnval){ [ 1492.493862] env[69992]: value = "task-2898234" [ 1492.493862] env[69992]: _type = "Task" [ 1492.493862] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.501676] env[69992]: DEBUG oslo_vmware.api [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898234, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.760197] env[69992]: DEBUG nova.compute.utils [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1492.761614] env[69992]: DEBUG nova.compute.manager [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1492.761772] env[69992]: DEBUG nova.network.neutron [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1492.805177] env[69992]: DEBUG nova.policy [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cdc7f71c9c4b4d40bf40b631c24b5ee6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '17ab89c6cf054418a4dd1a0e61b3a5e8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1493.004644] env[69992]: DEBUG oslo_vmware.api [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Task: {'id': task-2898234, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14278} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.004827] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1493.004995] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1493.005190] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1493.005369] env[69992]: INFO nova.compute.manager [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1493.005613] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1493.005842] env[69992]: DEBUG nova.compute.manager [-] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1493.005945] env[69992]: DEBUG nova.network.neutron [-] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1493.055072] env[69992]: DEBUG nova.network.neutron [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Successfully created port: 3319610f-82c5-4e8a-85bc-ec2d73b68ebe {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1493.195241] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cdbd12c-7de6-4d30-a3aa-69d4592cc284 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Volume attach. 
Driver type: vmdk {{(pid=69992) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1493.195485] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cdbd12c-7de6-4d30-a3aa-69d4592cc284 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582153', 'volume_id': 'cef2194f-8e69-465e-be8f-2a99094862a4', 'name': 'volume-cef2194f-8e69-465e-be8f-2a99094862a4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5df7d031-66bf-43eb-a05b-07b6cff9db59', 'attached_at': '', 'detached_at': '', 'volume_id': 'cef2194f-8e69-465e-be8f-2a99094862a4', 'serial': 'cef2194f-8e69-465e-be8f-2a99094862a4'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1493.196377] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3238f94e-9696-4046-9928-2d6e82081ff3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.217204] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456b6b12-1ea4-4185-8b2b-9f7eca0a1130 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.246505] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cdbd12c-7de6-4d30-a3aa-69d4592cc284 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] volume-cef2194f-8e69-465e-be8f-2a99094862a4/volume-cef2194f-8e69-465e-be8f-2a99094862a4.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1493.246861] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-876ef087-960b-4a83-8eb3-25cbc168c391 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.264756] env[69992]: DEBUG nova.compute.manager [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1493.270099] env[69992]: DEBUG oslo_vmware.api [None req-5cdbd12c-7de6-4d30-a3aa-69d4592cc284 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1493.270099] env[69992]: value = "task-2898235" [ 1493.270099] env[69992]: _type = "Task" [ 1493.270099] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.280330] env[69992]: DEBUG oslo_vmware.api [None req-5cdbd12c-7de6-4d30-a3aa-69d4592cc284 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898235, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.467623] env[69992]: DEBUG nova.compute.manager [req-7a558d87-7252-4389-8665-1be38cfc5781 req-a87b2a76-6b0b-4ab1-8f61-f607a26bdf33 service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Received event network-vif-deleted-4d2794aa-7eaf-404a-bf09-16cf3c357511 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1493.467827] env[69992]: INFO nova.compute.manager [req-7a558d87-7252-4389-8665-1be38cfc5781 req-a87b2a76-6b0b-4ab1-8f61-f607a26bdf33 service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Neutron deleted interface 4d2794aa-7eaf-404a-bf09-16cf3c357511; detaching it from the instance and deleting it from the info cache [ 1493.467997] env[69992]: DEBUG nova.network.neutron [req-7a558d87-7252-4389-8665-1be38cfc5781 req-a87b2a76-6b0b-4ab1-8f61-f607a26bdf33 service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.770559] env[69992]: INFO nova.virt.block_device [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Booting with volume b76a65d3-3712-42c5-a9d4-c35d2046ba1f at /dev/sda [ 1493.783209] env[69992]: DEBUG oslo_vmware.api [None req-5cdbd12c-7de6-4d30-a3aa-69d4592cc284 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898235, 'name': ReconfigVM_Task, 'duration_secs': 0.361569} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.783493] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cdbd12c-7de6-4d30-a3aa-69d4592cc284 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Reconfigured VM instance instance-00000073 to attach disk [datastore2] volume-cef2194f-8e69-465e-be8f-2a99094862a4/volume-cef2194f-8e69-465e-be8f-2a99094862a4.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1493.788653] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db6fed75-6dfc-4f8f-b055-cac58218706e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.804844] env[69992]: DEBUG oslo_vmware.api [None req-5cdbd12c-7de6-4d30-a3aa-69d4592cc284 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1493.804844] env[69992]: value = "task-2898236" [ 1493.804844] env[69992]: _type = "Task" [ 1493.804844] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.806020] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2df69883-30f7-49c4-badd-afefbf7c7c53 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.817363] env[69992]: DEBUG oslo_vmware.api [None req-5cdbd12c-7de6-4d30-a3aa-69d4592cc284 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898236, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.820578] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e9ce4c-c86d-4688-b150-c386813a3b73 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.851696] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4c4c3948-69f1-4890-817a-2c0cc39dea21 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.861009] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dfd49d9-b2dd-4aa9-984c-68cf62caec32 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.893768] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0417b5fd-39ae-486a-8d60-3e134d307053 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.901625] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40840894-313c-4f15-b233-fe0b1d7228ad {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.917019] env[69992]: DEBUG nova.virt.block_device [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Updating existing volume attachment record: 24b42e88-b8ea-4635-a355-7b264b5100ae {{(pid=69992) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1493.947828] env[69992]: DEBUG nova.network.neutron [-] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.971877] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5988d545-e4cf-4dbf-9207-d108ac1095d8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.982024] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4b70c7-1dd3-4a93-a873-bcbbe2c15ae1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.011888] env[69992]: DEBUG nova.compute.manager [req-7a558d87-7252-4389-8665-1be38cfc5781 req-a87b2a76-6b0b-4ab1-8f61-f607a26bdf33 service nova] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Detach interface failed, port_id=4d2794aa-7eaf-404a-bf09-16cf3c357511, 
reason: Instance b7af455d-a3a7-480f-b778-9eb3724fa6f1 could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1494.316213] env[69992]: DEBUG oslo_vmware.api [None req-5cdbd12c-7de6-4d30-a3aa-69d4592cc284 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898236, 'name': ReconfigVM_Task, 'duration_secs': 0.139163} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.316526] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cdbd12c-7de6-4d30-a3aa-69d4592cc284 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582153', 'volume_id': 'cef2194f-8e69-465e-be8f-2a99094862a4', 'name': 'volume-cef2194f-8e69-465e-be8f-2a99094862a4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5df7d031-66bf-43eb-a05b-07b6cff9db59', 'attached_at': '', 'detached_at': '', 'volume_id': 'cef2194f-8e69-465e-be8f-2a99094862a4', 'serial': 'cef2194f-8e69-465e-be8f-2a99094862a4'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1494.450510] env[69992]: INFO nova.compute.manager [-] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Took 1.44 seconds to deallocate network for instance. [ 1494.493918] env[69992]: DEBUG nova.compute.manager [req-4774a42e-6138-4859-b28f-ef59358bc182 req-d3bb7e97-c127-4ab0-8f21-9eaa083e8f2c service nova] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Received event network-vif-plugged-3319610f-82c5-4e8a-85bc-ec2d73b68ebe {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1494.494151] env[69992]: DEBUG oslo_concurrency.lockutils [req-4774a42e-6138-4859-b28f-ef59358bc182 req-d3bb7e97-c127-4ab0-8f21-9eaa083e8f2c service nova] Acquiring lock "7b549cd4-bfdc-45c5-9031-9b378ad7ee79-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1494.494353] env[69992]: DEBUG oslo_concurrency.lockutils [req-4774a42e-6138-4859-b28f-ef59358bc182 req-d3bb7e97-c127-4ab0-8f21-9eaa083e8f2c service nova] Lock "7b549cd4-bfdc-45c5-9031-9b378ad7ee79-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1494.494523] env[69992]: DEBUG oslo_concurrency.lockutils [req-4774a42e-6138-4859-b28f-ef59358bc182 req-d3bb7e97-c127-4ab0-8f21-9eaa083e8f2c service nova] Lock "7b549cd4-bfdc-45c5-9031-9b378ad7ee79-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1494.494863] env[69992]: DEBUG nova.compute.manager [req-4774a42e-6138-4859-b28f-ef59358bc182 req-d3bb7e97-c127-4ab0-8f21-9eaa083e8f2c service nova] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] No waiting events found dispatching network-vif-plugged-3319610f-82c5-4e8a-85bc-ec2d73b68ebe {{(pid=69992) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1494.494939] env[69992]: WARNING nova.compute.manager [req-4774a42e-6138-4859-b28f-ef59358bc182 req-d3bb7e97-c127-4ab0-8f21-9eaa083e8f2c service nova] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Received unexpected event network-vif-plugged-3319610f-82c5-4e8a-85bc-ec2d73b68ebe for instance with vm_state building and task_state block_device_mapping. [ 1494.586708] env[69992]: DEBUG nova.network.neutron [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Successfully updated port: 3319610f-82c5-4e8a-85bc-ec2d73b68ebe {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1494.958246] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1494.958538] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1494.958764] env[69992]: DEBUG nova.objects.instance [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lazy-loading 'resources' on Instance uuid b7af455d-a3a7-480f-b778-9eb3724fa6f1 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1495.089812] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "refresh_cache-7b549cd4-bfdc-45c5-9031-9b378ad7ee79" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.090043] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired lock "refresh_cache-7b549cd4-bfdc-45c5-9031-9b378ad7ee79" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1495.090192] env[69992]: DEBUG nova.network.neutron [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1495.361916] env[69992]: DEBUG nova.objects.instance [None req-5cdbd12c-7de6-4d30-a3aa-69d4592cc284 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lazy-loading 'flavor' on Instance uuid 5df7d031-66bf-43eb-a05b-07b6cff9db59 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1495.576264] env[69992]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36053f6e-c3cd-42be-a38f-8cfc16d6d3b5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.586249] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ce1b53-3b83-4c99-8183-9e06f7e0e3bd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.621563] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74363576-faa0-4bd0-bd52-5ad155a46112 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.632838] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ea4073-f124-416d-accb-5fcade0eda92 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.648692] env[69992]: DEBUG nova.compute.provider_tree [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1495.650623] env[69992]: DEBUG nova.network.neutron [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1495.792942] env[69992]: DEBUG nova.network.neutron [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Updating instance_info_cache with network_info: [{"id": "3319610f-82c5-4e8a-85bc-ec2d73b68ebe", "address": "fa:16:3e:ed:37:96", "network": {"id": "107a28a2-7647-4953-abac-1246b892511f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1382825212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ab89c6cf054418a4dd1a0e61b3a5e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3319610f-82", "ovs_interfaceid": "3319610f-82c5-4e8a-85bc-ec2d73b68ebe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1495.867439] env[69992]: DEBUG oslo_concurrency.lockutils [None req-5cdbd12c-7de6-4d30-a3aa-69d4592cc284 tempest-AttachVolumeNegativeTest-821716976 
tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "5df7d031-66bf-43eb-a05b-07b6cff9db59" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.286s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1496.006056] env[69992]: DEBUG nova.compute.manager [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1496.006685] env[69992]: DEBUG nova.virt.hardware [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1496.006955] env[69992]: DEBUG nova.virt.hardware [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1496.007174] env[69992]: DEBUG nova.virt.hardware [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1496.007398] env[69992]: DEBUG nova.virt.hardware [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1496.007578] env[69992]: DEBUG nova.virt.hardware [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1496.007756] env[69992]: DEBUG nova.virt.hardware [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1496.008022] env[69992]: DEBUG nova.virt.hardware [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1496.008213] env[69992]: DEBUG nova.virt.hardware [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1496.008399] env[69992]: DEBUG nova.virt.hardware [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1496.008567] env[69992]: DEBUG nova.virt.hardware [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1496.008737] env[69992]: DEBUG nova.virt.hardware [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1496.009650] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced6cc20-7bab-4087-83b6-d889fd7de5fb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.018517] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012d2d33-58fc-4b31-b213-07cb28a07925 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.128242] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6055d973-e744-4309-aa32-0e997b8001d0 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "5df7d031-66bf-43eb-a05b-07b6cff9db59" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1496.128513] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6055d973-e744-4309-aa32-0e997b8001d0 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "5df7d031-66bf-43eb-a05b-07b6cff9db59" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1496.154206] env[69992]: DEBUG nova.scheduler.client.report [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1496.295850] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Releasing lock "refresh_cache-7b549cd4-bfdc-45c5-9031-9b378ad7ee79" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1496.296186] env[69992]: DEBUG nova.compute.manager [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Instance network_info: |[{"id": "3319610f-82c5-4e8a-85bc-ec2d73b68ebe", "address": "fa:16:3e:ed:37:96", "network": {"id": "107a28a2-7647-4953-abac-1246b892511f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1382825212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ab89c6cf054418a4dd1a0e61b3a5e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3319610f-82", "ovs_interfaceid": "3319610f-82c5-4e8a-85bc-ec2d73b68ebe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1496.296623] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:37:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46785c9c-8b22-487d-a854-b3e67c5ed1d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3319610f-82c5-4e8a-85bc-ec2d73b68ebe', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1496.304295] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1496.304872] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1496.305139] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-88abcb8a-4bc6-4a87-ab25-38b41ad77eaf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.328795] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1496.328795] env[69992]: value = "task-2898237" [ 1496.328795] env[69992]: _type = "Task" [ 1496.328795] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.336944] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898237, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.518369] env[69992]: DEBUG nova.compute.manager [req-6f36a311-cc02-43c2-b984-329f47c9e3fb req-d1015cae-5ccc-4310-b2a1-bef44dcf776f service nova] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Received event network-changed-3319610f-82c5-4e8a-85bc-ec2d73b68ebe {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1496.518622] env[69992]: DEBUG nova.compute.manager [req-6f36a311-cc02-43c2-b984-329f47c9e3fb req-d1015cae-5ccc-4310-b2a1-bef44dcf776f service nova] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Refreshing instance network info cache due to event network-changed-3319610f-82c5-4e8a-85bc-ec2d73b68ebe. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1496.518814] env[69992]: DEBUG oslo_concurrency.lockutils [req-6f36a311-cc02-43c2-b984-329f47c9e3fb req-d1015cae-5ccc-4310-b2a1-bef44dcf776f service nova] Acquiring lock "refresh_cache-7b549cd4-bfdc-45c5-9031-9b378ad7ee79" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1496.518965] env[69992]: DEBUG oslo_concurrency.lockutils [req-6f36a311-cc02-43c2-b984-329f47c9e3fb req-d1015cae-5ccc-4310-b2a1-bef44dcf776f service nova] Acquired lock "refresh_cache-7b549cd4-bfdc-45c5-9031-9b378ad7ee79" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1496.519173] env[69992]: DEBUG nova.network.neutron [req-6f36a311-cc02-43c2-b984-329f47c9e3fb req-d1015cae-5ccc-4310-b2a1-bef44dcf776f service nova] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Refreshing network info cache for port 3319610f-82c5-4e8a-85bc-ec2d73b68ebe {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1496.631470] env[69992]: INFO nova.compute.manager [None req-6055d973-e744-4309-aa32-0e997b8001d0 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Detaching volume cef2194f-8e69-465e-be8f-2a99094862a4 [ 1496.659849] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.701s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1496.665571] env[69992]: INFO nova.virt.block_device [None req-6055d973-e744-4309-aa32-0e997b8001d0 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Attempting to driver detach volume cef2194f-8e69-465e-be8f-2a99094862a4 from mountpoint /dev/sdb [ 1496.665800] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6055d973-e744-4309-aa32-0e997b8001d0 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Volume detach. 
Driver type: vmdk {{(pid=69992) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1496.665983] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6055d973-e744-4309-aa32-0e997b8001d0 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582153', 'volume_id': 'cef2194f-8e69-465e-be8f-2a99094862a4', 'name': 'volume-cef2194f-8e69-465e-be8f-2a99094862a4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5df7d031-66bf-43eb-a05b-07b6cff9db59', 'attached_at': '', 'detached_at': '', 'volume_id': 'cef2194f-8e69-465e-be8f-2a99094862a4', 'serial': 'cef2194f-8e69-465e-be8f-2a99094862a4'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1496.666838] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67937c56-2a98-4144-aa05-af6de3503f6b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.688380] env[69992]: INFO nova.scheduler.client.report [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Deleted allocations for instance b7af455d-a3a7-480f-b778-9eb3724fa6f1 [ 1496.689872] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d959733f-138c-4d0d-b1e8-8afa98fc51a3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.700954] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb216290-f563-49a1-8e9f-03439f3fa5e6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.720965] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ac2fac-095e-4cc0-82d6-6888532305d0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.737792] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6055d973-e744-4309-aa32-0e997b8001d0 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] The volume has not been displaced from its original location: [datastore2] volume-cef2194f-8e69-465e-be8f-2a99094862a4/volume-cef2194f-8e69-465e-be8f-2a99094862a4.vmdk. No consolidation needed. 
{{(pid=69992) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1496.743054] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6055d973-e744-4309-aa32-0e997b8001d0 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Reconfiguring VM instance instance-00000073 to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1496.744293] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00b990d3-169c-4955-abfa-820a5085a095 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.762720] env[69992]: DEBUG oslo_vmware.api [None req-6055d973-e744-4309-aa32-0e997b8001d0 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1496.762720] env[69992]: value = "task-2898238" [ 1496.762720] env[69992]: _type = "Task" [ 1496.762720] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.770972] env[69992]: DEBUG oslo_vmware.api [None req-6055d973-e744-4309-aa32-0e997b8001d0 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898238, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.839229] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898237, 'name': CreateVM_Task, 'duration_secs': 0.328639} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.839436] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1496.840100] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'boot_index': 0, 'disk_bus': None, 'device_type': None, 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582150', 'volume_id': 'b76a65d3-3712-42c5-a9d4-c35d2046ba1f', 'name': 'volume-b76a65d3-3712-42c5-a9d4-c35d2046ba1f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7b549cd4-bfdc-45c5-9031-9b378ad7ee79', 'attached_at': '', 'detached_at': '', 'volume_id': 'b76a65d3-3712-42c5-a9d4-c35d2046ba1f', 'serial': 'b76a65d3-3712-42c5-a9d4-c35d2046ba1f'}, 'attachment_id': '24b42e88-b8ea-4635-a355-7b264b5100ae', 'delete_on_termination': True, 'mount_device': '/dev/sda', 'volume_type': None}], 'swap': None} {{(pid=69992) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1496.840325] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 
7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Root volume attach. Driver type: vmdk {{(pid=69992) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1496.841133] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d37e181f-5577-459a-9494-31a6110fd34a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.848648] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5185cac9-6355-4f46-89e1-15182707f9ff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.854644] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82deba68-6290-4e14-8a27-dd68abe3c65b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.860269] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-df59b0d3-8221-4081-8c9e-82a0d7d9b466 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.868385] env[69992]: DEBUG oslo_vmware.api [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1496.868385] env[69992]: value = "task-2898239" [ 1496.868385] env[69992]: _type = "Task" [ 1496.868385] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.877952] env[69992]: DEBUG oslo_vmware.api [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898239, 'name': RelocateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.199391] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3e584950-e992-4674-9177-3ad6359ec7cb tempest-ServerActionsTestOtherB-873622132 tempest-ServerActionsTestOtherB-873622132-project-member] Lock "b7af455d-a3a7-480f-b778-9eb3724fa6f1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.808s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1497.274373] env[69992]: DEBUG oslo_vmware.api [None req-6055d973-e744-4309-aa32-0e997b8001d0 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898238, 'name': ReconfigVM_Task, 'duration_secs': 0.245243} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.274784] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6055d973-e744-4309-aa32-0e997b8001d0 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Reconfigured VM instance instance-00000073 to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1497.279714] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21cb2f45-e871-4e4b-8ddf-1125768a151d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.300523] env[69992]: DEBUG oslo_vmware.api [None req-6055d973-e744-4309-aa32-0e997b8001d0 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1497.300523] env[69992]: value = "task-2898240" [ 1497.300523] env[69992]: _type = "Task" [ 1497.300523] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.311760] env[69992]: DEBUG oslo_vmware.api [None req-6055d973-e744-4309-aa32-0e997b8001d0 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898240, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.383433] env[69992]: DEBUG oslo_vmware.api [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898239, 'name': RelocateVM_Task} progress is 42%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.563405] env[69992]: DEBUG nova.network.neutron [req-6f36a311-cc02-43c2-b984-329f47c9e3fb req-d1015cae-5ccc-4310-b2a1-bef44dcf776f service nova] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Updated VIF entry in instance network info cache for port 3319610f-82c5-4e8a-85bc-ec2d73b68ebe. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1497.563839] env[69992]: DEBUG nova.network.neutron [req-6f36a311-cc02-43c2-b984-329f47c9e3fb req-d1015cae-5ccc-4310-b2a1-bef44dcf776f service nova] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Updating instance_info_cache with network_info: [{"id": "3319610f-82c5-4e8a-85bc-ec2d73b68ebe", "address": "fa:16:3e:ed:37:96", "network": {"id": "107a28a2-7647-4953-abac-1246b892511f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1382825212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ab89c6cf054418a4dd1a0e61b3a5e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3319610f-82", "ovs_interfaceid": "3319610f-82c5-4e8a-85bc-ec2d73b68ebe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1497.812586] env[69992]: DEBUG oslo_vmware.api [None req-6055d973-e744-4309-aa32-0e997b8001d0 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898240, 'name': ReconfigVM_Task, 'duration_secs': 0.171997} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.812830] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6055d973-e744-4309-aa32-0e997b8001d0 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582153', 'volume_id': 'cef2194f-8e69-465e-be8f-2a99094862a4', 'name': 'volume-cef2194f-8e69-465e-be8f-2a99094862a4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5df7d031-66bf-43eb-a05b-07b6cff9db59', 'attached_at': '', 'detached_at': '', 'volume_id': 'cef2194f-8e69-465e-be8f-2a99094862a4', 'serial': 'cef2194f-8e69-465e-be8f-2a99094862a4'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1497.881858] env[69992]: DEBUG oslo_vmware.api [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898239, 'name': RelocateVM_Task} progress is 56%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.067282] env[69992]: DEBUG oslo_concurrency.lockutils [req-6f36a311-cc02-43c2-b984-329f47c9e3fb req-d1015cae-5ccc-4310-b2a1-bef44dcf776f service nova] Releasing lock "refresh_cache-7b549cd4-bfdc-45c5-9031-9b378ad7ee79" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1498.357363] env[69992]: DEBUG nova.objects.instance [None req-6055d973-e744-4309-aa32-0e997b8001d0 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lazy-loading 'flavor' on Instance uuid 5df7d031-66bf-43eb-a05b-07b6cff9db59 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1498.382768] env[69992]: DEBUG oslo_vmware.api [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898239, 'name': RelocateVM_Task} progress is 71%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.884607] env[69992]: DEBUG oslo_vmware.api [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898239, 'name': RelocateVM_Task} progress is 84%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.366208] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6055d973-e744-4309-aa32-0e997b8001d0 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "5df7d031-66bf-43eb-a05b-07b6cff9db59" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.237s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1499.380667] env[69992]: DEBUG oslo_vmware.api [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898239, 'name': RelocateVM_Task} progress is 97%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.881840] env[69992]: DEBUG oslo_vmware.api [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898239, 'name': RelocateVM_Task, 'duration_secs': 3.008107} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.882140] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Volume attach. 
Driver type: vmdk {{(pid=69992) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1499.882350] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582150', 'volume_id': 'b76a65d3-3712-42c5-a9d4-c35d2046ba1f', 'name': 'volume-b76a65d3-3712-42c5-a9d4-c35d2046ba1f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7b549cd4-bfdc-45c5-9031-9b378ad7ee79', 'attached_at': '', 'detached_at': '', 'volume_id': 'b76a65d3-3712-42c5-a9d4-c35d2046ba1f', 'serial': 'b76a65d3-3712-42c5-a9d4-c35d2046ba1f'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1499.883114] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8294a685-ab33-43c6-b1d3-5ee9d99f63fd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.901595] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980d891e-986e-4793-b446-d6fe286f388b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.923914] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] volume-b76a65d3-3712-42c5-a9d4-c35d2046ba1f/volume-b76a65d3-3712-42c5-a9d4-c35d2046ba1f.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1499.924503] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c35ab345-6581-4749-905c-f80d9e8ad12b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.954395] env[69992]: DEBUG oslo_vmware.api [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1499.954395] env[69992]: value = "task-2898242" [ 1499.954395] env[69992]: _type = "Task" [ 1499.954395] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.963135] env[69992]: DEBUG oslo_vmware.api [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898242, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.363645] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "5df7d031-66bf-43eb-a05b-07b6cff9db59" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1500.363972] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "5df7d031-66bf-43eb-a05b-07b6cff9db59" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1500.364196] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "5df7d031-66bf-43eb-a05b-07b6cff9db59-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1500.364415] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "5df7d031-66bf-43eb-a05b-07b6cff9db59-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1500.364592] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "5df7d031-66bf-43eb-a05b-07b6cff9db59-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1500.367058] env[69992]: INFO nova.compute.manager [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Terminating instance [ 1500.465300] env[69992]: DEBUG oslo_vmware.api [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898242, 'name': ReconfigVM_Task, 'duration_secs': 0.26694} completed successfully. 
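The Acquiring/acquired/released lines above are emitted by oslo.concurrency's lock wrapper (the "inner" frames in lockutils.py); do_terminate_instance serializes on a per-instance lock named after the instance UUID. A sketch of the underlying primitive follows; Nova wraps it in its own helpers, so this is illustrative only.

    from oslo_concurrency import lockutils

    INSTANCE_UUID = '5df7d031-66bf-43eb-a05b-07b6cff9db59'

    @lockutils.synchronized(INSTANCE_UUID)
    def do_terminate_instance():
        # Work done here holds the per-instance lock, which produces the
        # "acquired ... waited" / "released ... held N.NNNs" bookkeeping above.
        pass

    # Equivalent context-manager form:
    with lockutils.lock(INSTANCE_UUID):
        pass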
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.465564] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Reconfigured VM instance instance-00000079 to attach disk [datastore1] volume-b76a65d3-3712-42c5-a9d4-c35d2046ba1f/volume-b76a65d3-3712-42c5-a9d4-c35d2046ba1f.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1500.470263] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b3ff3d5-3360-47cd-a398-dbe1c6f4e6e7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.484988] env[69992]: DEBUG oslo_vmware.api [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1500.484988] env[69992]: value = "task-2898243" [ 1500.484988] env[69992]: _type = "Task" [ 1500.484988] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.493191] env[69992]: DEBUG oslo_vmware.api [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898243, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.870741] env[69992]: DEBUG nova.compute.manager [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1500.870986] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1500.871872] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b42d0d-c6e2-4106-95c8-7ee397d10105 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.879721] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1500.879953] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7179a6dd-0c56-46bd-a506-725fad92561c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.886469] env[69992]: DEBUG oslo_vmware.api [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1500.886469] env[69992]: value = "task-2898244" [ 1500.886469] env[69992]: _type = "Task" [ 1500.886469] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.894165] env[69992]: DEBUG oslo_vmware.api [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898244, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.996103] env[69992]: DEBUG oslo_vmware.api [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898243, 'name': ReconfigVM_Task, 'duration_secs': 0.136289} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.996475] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582150', 'volume_id': 'b76a65d3-3712-42c5-a9d4-c35d2046ba1f', 'name': 'volume-b76a65d3-3712-42c5-a9d4-c35d2046ba1f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7b549cd4-bfdc-45c5-9031-9b378ad7ee79', 'attached_at': '', 'detached_at': '', 'volume_id': 'b76a65d3-3712-42c5-a9d4-c35d2046ba1f', 'serial': 'b76a65d3-3712-42c5-a9d4-c35d2046ba1f'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1500.996951] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-61a016a1-88d4-4c39-8abe-f6bd85daab49 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.005142] env[69992]: DEBUG oslo_vmware.api [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1501.005142] env[69992]: value = "task-2898245" [ 1501.005142] env[69992]: _type = "Task" [ 1501.005142] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.014045] env[69992]: DEBUG oslo_vmware.api [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898245, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.397327] env[69992]: DEBUG oslo_vmware.api [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898244, 'name': PowerOffVM_Task, 'duration_secs': 0.206578} completed successfully. 
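The "Attached VMDK" dict above is the Cinder connection_info for a vmdk-type volume. Reduced here to the fields the attach path visibly uses; the comments are a best-effort reading of this log, not an authoritative description of the driver contract.

    connection_info = {
        'driver_volume_type': 'vmdk',
        'data': {
            'volume': 'vm-582150',                                  # vCenter moref of the volume's backing ("shadow") VM
            'volume_id': 'b76a65d3-3712-42c5-a9d4-c35d2046ba1f',    # Cinder volume UUID
            'name': 'volume-b76a65d3-3712-42c5-a9d4-c35d2046ba1f',  # maps to [datastore1] volume-.../volume-....vmdk above
            'access_mode': 'rw',
            'encrypted': False,
        },
    }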
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.397622] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1501.397797] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1501.398073] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bce35c33-ff5d-4807-8e03-f91103608dd1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.515315] env[69992]: DEBUG oslo_vmware.api [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898245, 'name': Rename_Task, 'duration_secs': 0.135935} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.515589] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1501.515886] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc5932cf-59a7-46d2-b1f9-969735e354ad {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.524627] env[69992]: DEBUG oslo_vmware.api [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1501.524627] env[69992]: value = "task-2898247" [ 1501.524627] env[69992]: _type = "Task" [ 1501.524627] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.532051] env[69992]: DEBUG oslo_vmware.api [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898247, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.035132] env[69992]: DEBUG oslo_vmware.api [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898247, 'name': PowerOnVM_Task, 'duration_secs': 0.478545} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.035519] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1502.035607] env[69992]: INFO nova.compute.manager [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Took 6.03 seconds to spawn the instance on the hypervisor. [ 1502.035794] env[69992]: DEBUG nova.compute.manager [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1502.036614] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b85bbdbd-853b-422d-bf92-26dfbeed0979 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.197874] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1502.198289] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1502.198472] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Deleting the datastore file [datastore1] 5df7d031-66bf-43eb-a05b-07b6cff9db59 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1502.198802] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9b19f8b7-910f-4da2-9875-5ebd080bfdef {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.210735] env[69992]: DEBUG oslo_vmware.api [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1502.210735] env[69992]: value = "task-2898248" [ 1502.210735] env[69992]: _type = "Task" [ 1502.210735] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.222681] env[69992]: DEBUG oslo_vmware.api [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898248, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.554619] env[69992]: INFO nova.compute.manager [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Took 12.53 seconds to build instance. [ 1502.721023] env[69992]: DEBUG oslo_vmware.api [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898248, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14629} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.721292] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1502.721511] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1502.721694] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1502.721873] env[69992]: INFO nova.compute.manager [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Took 1.85 seconds to destroy the instance on the hypervisor. [ 1502.722130] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1502.722326] env[69992]: DEBUG nova.compute.manager [-] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1502.722421] env[69992]: DEBUG nova.network.neutron [-] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1503.057369] env[69992]: DEBUG oslo_concurrency.lockutils [None req-a0d13f92-b668-41fa-9662-95c29a64d01c tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "7b549cd4-bfdc-45c5-9031-9b378ad7ee79" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.038s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1503.193295] env[69992]: DEBUG nova.compute.manager [req-566bbc4c-ce68-4d0a-b4b9-37334c0d85b6 req-9ccb606b-06eb-4702-9410-236c6d1d4a75 service nova] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Received event network-vif-deleted-7c7f4aa4-cd49-487f-8637-9ee035bbab41 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1503.193496] env[69992]: INFO nova.compute.manager [req-566bbc4c-ce68-4d0a-b4b9-37334c0d85b6 req-9ccb606b-06eb-4702-9410-236c6d1d4a75 service nova] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Neutron deleted interface 7c7f4aa4-cd49-487f-8637-9ee035bbab41; detaching it from the instance and deleting it from the info cache [ 1503.193669] env[69992]: DEBUG nova.network.neutron [req-566bbc4c-ce68-4d0a-b4b9-37334c0d85b6 req-9ccb606b-06eb-4702-9410-236c6d1d4a75 service nova] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1503.678695] env[69992]: DEBUG nova.network.neutron [-] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1503.699278] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5cc4551-b187-450e-be82-676cd0cf8772 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.708454] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8350a588-1a5f-4101-acc0-09a2270f54dd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.743021] env[69992]: DEBUG nova.compute.manager [req-566bbc4c-ce68-4d0a-b4b9-37334c0d85b6 req-9ccb606b-06eb-4702-9410-236c6d1d4a75 service nova] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Detach interface failed, port_id=7c7f4aa4-cd49-487f-8637-9ee035bbab41, reason: Instance 5df7d031-66bf-43eb-a05b-07b6cff9db59 could not be found. 
{{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1503.968814] env[69992]: DEBUG nova.compute.manager [req-3fb4f682-7252-4e0e-8368-d2a22b565e6e req-1f285e41-d4f1-41ec-9d9e-8aa1f57a57ca service nova] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Received event network-changed-ebc337ca-1f7f-449a-85a1-1af599dd4a19 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1503.968985] env[69992]: DEBUG nova.compute.manager [req-3fb4f682-7252-4e0e-8368-d2a22b565e6e req-1f285e41-d4f1-41ec-9d9e-8aa1f57a57ca service nova] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Refreshing instance network info cache due to event network-changed-ebc337ca-1f7f-449a-85a1-1af599dd4a19. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1503.969282] env[69992]: DEBUG oslo_concurrency.lockutils [req-3fb4f682-7252-4e0e-8368-d2a22b565e6e req-1f285e41-d4f1-41ec-9d9e-8aa1f57a57ca service nova] Acquiring lock "refresh_cache-88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1503.969482] env[69992]: DEBUG oslo_concurrency.lockutils [req-3fb4f682-7252-4e0e-8368-d2a22b565e6e req-1f285e41-d4f1-41ec-9d9e-8aa1f57a57ca service nova] Acquired lock "refresh_cache-88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1503.969660] env[69992]: DEBUG nova.network.neutron [req-3fb4f682-7252-4e0e-8368-d2a22b565e6e req-1f285e41-d4f1-41ec-9d9e-8aa1f57a57ca service nova] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Refreshing network info cache for port ebc337ca-1f7f-449a-85a1-1af599dd4a19 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1504.180378] env[69992]: INFO nova.compute.manager [-] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Took 1.46 seconds to deallocate network for instance. [ 1504.689601] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1504.689907] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1504.690157] env[69992]: DEBUG nova.objects.instance [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lazy-loading 'resources' on Instance uuid 5df7d031-66bf-43eb-a05b-07b6cff9db59 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1504.818198] env[69992]: DEBUG nova.network.neutron [req-3fb4f682-7252-4e0e-8368-d2a22b565e6e req-1f285e41-d4f1-41ec-9d9e-8aa1f57a57ca service nova] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Updated VIF entry in instance network info cache for port ebc337ca-1f7f-449a-85a1-1af599dd4a19. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1504.819494] env[69992]: DEBUG nova.network.neutron [req-3fb4f682-7252-4e0e-8368-d2a22b565e6e req-1f285e41-d4f1-41ec-9d9e-8aa1f57a57ca service nova] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Updating instance_info_cache with network_info: [{"id": "ebc337ca-1f7f-449a-85a1-1af599dd4a19", "address": "fa:16:3e:8e:40:95", "network": {"id": "107a28a2-7647-4953-abac-1246b892511f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1382825212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ab89c6cf054418a4dd1a0e61b3a5e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebc337ca-1f", "ovs_interfaceid": "ebc337ca-1f7f-449a-85a1-1af599dd4a19", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1505.287079] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ab176f2-c1c0-44ad-bcf9-3e62dd197ca7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.295233] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f07a0b-aad5-4c1c-94d8-4d9c5928dcb7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.328672] env[69992]: DEBUG oslo_concurrency.lockutils [req-3fb4f682-7252-4e0e-8368-d2a22b565e6e req-1f285e41-d4f1-41ec-9d9e-8aa1f57a57ca service nova] Releasing lock "refresh_cache-88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1505.329800] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71093f7-abec-4ad5-a825-c6c137bbcaab {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.338243] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-124026e5-9017-46b2-9a66-a9ba06ca7394 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.352292] env[69992]: DEBUG nova.compute.provider_tree [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1505.459553] env[69992]: DEBUG nova.compute.manager [None req-604d995b-f773-42b3-aef5-af5952d667fd 
tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Stashing vm_state: active {{(pid=69992) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1505.855124] env[69992]: DEBUG nova.scheduler.client.report [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1505.973834] env[69992]: DEBUG oslo_concurrency.lockutils [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1505.996350] env[69992]: DEBUG nova.compute.manager [req-bb146fc8-0a15-454c-b4c5-bce1c4ffde6a req-93087fad-51f9-4501-96ac-d7bd5ec838d4 service nova] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Received event network-changed-3319610f-82c5-4e8a-85bc-ec2d73b68ebe {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1505.996493] env[69992]: DEBUG nova.compute.manager [req-bb146fc8-0a15-454c-b4c5-bce1c4ffde6a req-93087fad-51f9-4501-96ac-d7bd5ec838d4 service nova] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Refreshing instance network info cache due to event network-changed-3319610f-82c5-4e8a-85bc-ec2d73b68ebe. 
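The inventory dict reported above is what the resource tracker pushes to Placement. Capacity checks there use (total - reserved) * allocation_ratio per resource class, with max_unit capping any single allocation; the runnable restatement below only re-derives the logged numbers.

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 161},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: schedulable capacity {capacity:g}, single-allocation cap {inv['max_unit']}")
    # -> VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400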
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1505.996712] env[69992]: DEBUG oslo_concurrency.lockutils [req-bb146fc8-0a15-454c-b4c5-bce1c4ffde6a req-93087fad-51f9-4501-96ac-d7bd5ec838d4 service nova] Acquiring lock "refresh_cache-7b549cd4-bfdc-45c5-9031-9b378ad7ee79" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1505.996822] env[69992]: DEBUG oslo_concurrency.lockutils [req-bb146fc8-0a15-454c-b4c5-bce1c4ffde6a req-93087fad-51f9-4501-96ac-d7bd5ec838d4 service nova] Acquired lock "refresh_cache-7b549cd4-bfdc-45c5-9031-9b378ad7ee79" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1505.997391] env[69992]: DEBUG nova.network.neutron [req-bb146fc8-0a15-454c-b4c5-bce1c4ffde6a req-93087fad-51f9-4501-96ac-d7bd5ec838d4 service nova] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Refreshing network info cache for port 3319610f-82c5-4e8a-85bc-ec2d73b68ebe {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1506.281600] env[69992]: DEBUG oslo_concurrency.lockutils [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Acquiring lock "e7539d23-b4bb-48e8-89f4-ba98e6a12a01" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1506.281853] env[69992]: DEBUG oslo_concurrency.lockutils [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Lock "e7539d23-b4bb-48e8-89f4-ba98e6a12a01" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1506.360970] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.671s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1506.363547] env[69992]: DEBUG oslo_concurrency.lockutils [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.390s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1506.379139] env[69992]: INFO nova.scheduler.client.report [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Deleted allocations for instance 5df7d031-66bf-43eb-a05b-07b6cff9db59 [ 1506.786819] env[69992]: DEBUG nova.compute.manager [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1506.801334] env[69992]: DEBUG nova.network.neutron [req-bb146fc8-0a15-454c-b4c5-bce1c4ffde6a req-93087fad-51f9-4501-96ac-d7bd5ec838d4 service nova] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Updated VIF entry in instance network info cache for port 3319610f-82c5-4e8a-85bc-ec2d73b68ebe. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1506.801682] env[69992]: DEBUG nova.network.neutron [req-bb146fc8-0a15-454c-b4c5-bce1c4ffde6a req-93087fad-51f9-4501-96ac-d7bd5ec838d4 service nova] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Updating instance_info_cache with network_info: [{"id": "3319610f-82c5-4e8a-85bc-ec2d73b68ebe", "address": "fa:16:3e:ed:37:96", "network": {"id": "107a28a2-7647-4953-abac-1246b892511f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1382825212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ab89c6cf054418a4dd1a0e61b3a5e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3319610f-82", "ovs_interfaceid": "3319610f-82c5-4e8a-85bc-ec2d73b68ebe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1506.868460] env[69992]: INFO nova.compute.claims [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1506.887547] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1be3e2c-6c1b-4aee-bdc0-479848631fd9 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "5df7d031-66bf-43eb-a05b-07b6cff9db59" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.524s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1507.304857] env[69992]: DEBUG oslo_concurrency.lockutils [req-bb146fc8-0a15-454c-b4c5-bce1c4ffde6a req-93087fad-51f9-4501-96ac-d7bd5ec838d4 service nova] Releasing lock "refresh_cache-7b549cd4-bfdc-45c5-9031-9b378ad7ee79" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1507.310313] env[69992]: DEBUG oslo_concurrency.lockutils [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1507.374691] env[69992]: INFO nova.compute.resource_tracker [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Updating resource usage from migration 685133dc-8ed1-4552-b3aa-faf26d22ebb4 [ 1507.452807] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df370bbc-b096-487f-964a-0bb455ed6a4f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.460849] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7113d9e-55a7-4684-a028-670e063169ad {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.491008] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-399067a3-63c1-48dc-a603-991d64a77a05 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.498189] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69bb61ca-320e-470c-8d5a-bc735d9c4342 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.510964] env[69992]: DEBUG nova.compute.provider_tree [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1508.013772] env[69992]: DEBUG nova.scheduler.client.report [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1508.320761] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "6df2b739-02c0-40ac-b2a2-14587e3996bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1508.321000] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "6df2b739-02c0-40ac-b2a2-14587e3996bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1508.519338] env[69992]: DEBUG oslo_concurrency.lockutils [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.156s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1508.519652] env[69992]: INFO nova.compute.manager [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Migrating [ 1508.526045] env[69992]: DEBUG oslo_concurrency.lockutils [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.216s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1508.527395] env[69992]: INFO nova.compute.claims [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1508.823278] env[69992]: DEBUG nova.compute.manager [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1509.038563] env[69992]: DEBUG oslo_concurrency.lockutils [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "refresh_cache-7b549cd4-bfdc-45c5-9031-9b378ad7ee79" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1509.039563] env[69992]: DEBUG oslo_concurrency.lockutils [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired lock "refresh_cache-7b549cd4-bfdc-45c5-9031-9b378ad7ee79" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1509.039848] env[69992]: DEBUG nova.network.neutron [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1509.348318] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1509.646793] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b59f978-accf-4b4b-9612-e6b1cc7cdd68 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.654555] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8daf6dc-457d-49ab-bad6-0206a721abc5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.688117] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9720e1c-6352-4135-8156-f1dfb245238f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.695314] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d331cd8e-4f00-45cf-8a64-e8ee87a4b11a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.710198] env[69992]: DEBUG nova.compute.provider_tree [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1509.783255] env[69992]: DEBUG nova.network.neutron [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Updating instance_info_cache with network_info: [{"id": "3319610f-82c5-4e8a-85bc-ec2d73b68ebe", "address": "fa:16:3e:ed:37:96", "network": {"id": "107a28a2-7647-4953-abac-1246b892511f", 
"bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1382825212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ab89c6cf054418a4dd1a0e61b3a5e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3319610f-82", "ovs_interfaceid": "3319610f-82c5-4e8a-85bc-ec2d73b68ebe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1510.216710] env[69992]: DEBUG nova.scheduler.client.report [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1510.286355] env[69992]: DEBUG oslo_concurrency.lockutils [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Releasing lock "refresh_cache-7b549cd4-bfdc-45c5-9031-9b378ad7ee79" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1510.719631] env[69992]: DEBUG oslo_concurrency.lockutils [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.193s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1510.720114] env[69992]: DEBUG nova.compute.manager [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1510.722853] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.375s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1510.724424] env[69992]: INFO nova.compute.claims [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1511.229445] env[69992]: DEBUG nova.compute.utils [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1511.233848] env[69992]: DEBUG nova.compute.manager [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1511.234089] env[69992]: DEBUG nova.network.neutron [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1511.274696] env[69992]: DEBUG nova.policy [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8a2771437b5438e9b1b8436cb117fe1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '726ff8ac2fef439eb92c8e16f6365f78', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1511.530370] env[69992]: DEBUG nova.network.neutron [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Successfully created port: bd85f0c5-e04c-4955-b2f0-952380a45baa {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1511.735238] env[69992]: DEBUG nova.compute.manager [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1511.801988] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ea6784-d334-42f1-9131-7fa4b2668b8d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.825738] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Updating instance '7b549cd4-bfdc-45c5-9031-9b378ad7ee79' progress to 0 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1511.850620] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f58b536-0f8a-4963-8911-3d6cb2104257 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.857604] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d30bbd-3763-4645-884f-69cd15658081 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.888648] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d445e826-29ef-4d2c-b817-a142cbdf95f6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.895700] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a77933-b8c0-446f-a799-480f1963e461 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.908959] env[69992]: DEBUG nova.compute.provider_tree [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1512.333318] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1512.333608] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b657c0f-bfc5-493b-8f85-8c2f5a51bf74 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.342796] env[69992]: DEBUG oslo_vmware.api [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1512.342796] env[69992]: value = "task-2898249" [ 1512.342796] env[69992]: _type = "Task" [ 1512.342796] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.350937] env[69992]: DEBUG oslo_vmware.api [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898249, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.411837] env[69992]: DEBUG nova.scheduler.client.report [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1512.605057] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1512.609682] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1512.749159] env[69992]: DEBUG nova.compute.manager [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1512.781285] env[69992]: DEBUG nova.virt.hardware [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=<?>,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-10T17:43:46Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1512.781599] env[69992]: DEBUG nova.virt.hardware [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1512.781771] env[69992]: DEBUG nova.virt.hardware [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1512.781959] env[69992]: DEBUG nova.virt.hardware [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1512.782177] env[69992]: DEBUG nova.virt.hardware [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1512.782354] env[69992]: DEBUG nova.virt.hardware [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1512.782545] env[69992]: DEBUG nova.virt.hardware [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1512.782711] env[69992]: DEBUG nova.virt.hardware [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1512.782883] env[69992]: DEBUG
nova.virt.hardware [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1512.783070] env[69992]: DEBUG nova.virt.hardware [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1512.783434] env[69992]: DEBUG nova.virt.hardware [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1512.784135] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e19240-579a-4cf3-83c4-b42b7c4eba09 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.792267] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab03b2e-4f1c-449b-b909-8b045512e605 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.852696] env[69992]: DEBUG oslo_vmware.api [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898249, 'name': PowerOffVM_Task, 'duration_secs': 0.230857} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.852696] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1512.852871] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Updating instance '7b549cd4-bfdc-45c5-9031-9b378ad7ee79' progress to 17 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1512.890510] env[69992]: DEBUG nova.compute.manager [req-e51d3f7d-9dfb-4389-b9d6-44386dd4c844 req-442fa6b7-d4b8-4394-a053-9471ea9a4f0c service nova] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Received event network-vif-plugged-bd85f0c5-e04c-4955-b2f0-952380a45baa {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1512.890611] env[69992]: DEBUG oslo_concurrency.lockutils [req-e51d3f7d-9dfb-4389-b9d6-44386dd4c844 req-442fa6b7-d4b8-4394-a053-9471ea9a4f0c service nova] Acquiring lock "e7539d23-b4bb-48e8-89f4-ba98e6a12a01-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1512.891389] env[69992]: DEBUG oslo_concurrency.lockutils [req-e51d3f7d-9dfb-4389-b9d6-44386dd4c844 req-442fa6b7-d4b8-4394-a053-9471ea9a4f0c service nova] Lock "e7539d23-b4bb-48e8-89f4-ba98e6a12a01-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1512.891389] env[69992]: DEBUG oslo_concurrency.lockutils [req-e51d3f7d-9dfb-4389-b9d6-44386dd4c844 req-442fa6b7-d4b8-4394-a053-9471ea9a4f0c service nova] Lock "e7539d23-b4bb-48e8-89f4-ba98e6a12a01-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1512.891389] env[69992]: DEBUG nova.compute.manager [req-e51d3f7d-9dfb-4389-b9d6-44386dd4c844 req-442fa6b7-d4b8-4394-a053-9471ea9a4f0c service nova] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] No waiting events found dispatching network-vif-plugged-bd85f0c5-e04c-4955-b2f0-952380a45baa {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1512.891718] env[69992]: WARNING nova.compute.manager [req-e51d3f7d-9dfb-4389-b9d6-44386dd4c844 req-442fa6b7-d4b8-4394-a053-9471ea9a4f0c service nova] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Received unexpected event network-vif-plugged-bd85f0c5-e04c-4955-b2f0-952380a45baa for instance with vm_state building and task_state spawning. 
[ 1512.916735] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.194s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1512.917343] env[69992]: DEBUG nova.compute.manager [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1512.969991] env[69992]: DEBUG nova.network.neutron [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Successfully updated port: bd85f0c5-e04c-4955-b2f0-952380a45baa {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1513.360981] env[69992]: DEBUG nova.virt.hardware [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format=<?>,created_at=<?>,direct_url=<?>,disk_format=<?>,id=<?>,min_disk=1,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1513.360981] env[69992]: DEBUG nova.virt.hardware [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1513.360981] env[69992]: DEBUG nova.virt.hardware [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1513.360981] env[69992]: DEBUG nova.virt.hardware [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1513.360981] env[69992]: DEBUG nova.virt.hardware [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1513.360981] env[69992]: DEBUG nova.virt.hardware [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1513.360981] env[69992]: DEBUG nova.virt.hardware [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1513.361604] env[69992]: DEBUG nova.virt.hardware [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1513.361890] env[69992]: DEBUG nova.virt.hardware [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1513.362093] env[69992]: DEBUG nova.virt.hardware [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1513.362294] env[69992]: DEBUG nova.virt.hardware [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1513.367421] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f0c1bdf-25a0-4d70-ab83-ab5146d14764 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.383078] env[69992]: DEBUG oslo_vmware.api [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1513.383078] env[69992]: value = "task-2898250" [ 1513.383078] env[69992]: _type = "Task" [ 1513.383078] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.393977] env[69992]: DEBUG oslo_vmware.api [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898250, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.424700] env[69992]: DEBUG nova.compute.utils [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1513.426902] env[69992]: DEBUG nova.compute.manager [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1513.426902] env[69992]: DEBUG nova.network.neutron [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1513.464922] env[69992]: DEBUG nova.policy [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94f19c179a3545089bcc66b7e5dc36e5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4da04b8933ad4d2ba4b1c193853f31b2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1513.472518] env[69992]: DEBUG oslo_concurrency.lockutils [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Acquiring lock "refresh_cache-e7539d23-b4bb-48e8-89f4-ba98e6a12a01" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1513.472681] env[69992]: DEBUG oslo_concurrency.lockutils [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Acquired lock "refresh_cache-e7539d23-b4bb-48e8-89f4-ba98e6a12a01" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1513.472904] env[69992]: DEBUG nova.network.neutron [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1513.609624] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1513.609853] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69992) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1513.610036] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1513.728382] env[69992]: DEBUG nova.network.neutron [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Successfully created port: ba5a70da-7de9-4267-a3cf-1cdbabc9aa22 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1513.893777] env[69992]: DEBUG oslo_vmware.api [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898250, 'name': ReconfigVM_Task, 'duration_secs': 0.142751} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.894069] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Updating instance '7b549cd4-bfdc-45c5-9031-9b378ad7ee79' progress to 33 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1513.929884] env[69992]: DEBUG nova.compute.manager [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1514.004190] env[69992]: DEBUG nova.network.neutron [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1514.119117] env[69992]: DEBUG nova.network.neutron [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Updating instance_info_cache with network_info: [{"id": "bd85f0c5-e04c-4955-b2f0-952380a45baa", "address": "fa:16:3e:0d:e7:09", "network": {"id": "9ddf4a94-71d3-42d5-b6a8-acbf9e8f481b", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1760716510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "726ff8ac2fef439eb92c8e16f6365f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db00ec2e-3155-46b6-8170-082f7d86dbe7", "external-id": "nsx-vlan-transportzone-332", "segmentation_id": 332, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd85f0c5-e0", "ovs_interfaceid": "bd85f0c5-e04c-4955-b2f0-952380a45baa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1514.401031] env[69992]: DEBUG nova.virt.hardware [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format=<?>,created_at=<?>,direct_url=<?>,disk_format=<?>,id=<?>,min_disk=1,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1514.401031] env[69992]: DEBUG nova.virt.hardware [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1514.402784] env[69992]: DEBUG nova.virt.hardware [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1514.402784] env[69992]: DEBUG nova.virt.hardware [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1514.402784] env[69992]: DEBUG nova.virt.hardware [None req-604d995b-f773-42b3-aef5-af5952d667fd 
tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1514.402784] env[69992]: DEBUG nova.virt.hardware [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1514.402784] env[69992]: DEBUG nova.virt.hardware [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1514.402784] env[69992]: DEBUG nova.virt.hardware [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1514.402784] env[69992]: DEBUG nova.virt.hardware [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1514.402784] env[69992]: DEBUG nova.virt.hardware [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1514.403126] env[69992]: DEBUG nova.virt.hardware [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1514.408220] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Reconfiguring VM instance instance-00000079 to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1514.408503] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22d009a5-a7a0-4290-a3b7-69007f06d942 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.426920] env[69992]: DEBUG oslo_vmware.api [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1514.426920] env[69992]: value = "task-2898251" [ 1514.426920] env[69992]: _type = "Task" [ 1514.426920] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.437979] env[69992]: DEBUG oslo_vmware.api [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898251, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.609090] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1514.621424] env[69992]: DEBUG oslo_concurrency.lockutils [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Releasing lock "refresh_cache-e7539d23-b4bb-48e8-89f4-ba98e6a12a01" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1514.621740] env[69992]: DEBUG nova.compute.manager [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Instance network_info: |[{"id": "bd85f0c5-e04c-4955-b2f0-952380a45baa", "address": "fa:16:3e:0d:e7:09", "network": {"id": "9ddf4a94-71d3-42d5-b6a8-acbf9e8f481b", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1760716510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "726ff8ac2fef439eb92c8e16f6365f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db00ec2e-3155-46b6-8170-082f7d86dbe7", "external-id": "nsx-vlan-transportzone-332", "segmentation_id": 332, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd85f0c5-e0", "ovs_interfaceid": "bd85f0c5-e04c-4955-b2f0-952380a45baa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1514.622413] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:e7:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db00ec2e-3155-46b6-8170-082f7d86dbe7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd85f0c5-e04c-4955-b2f0-952380a45baa', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1514.629879] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 
tempest-ServerMetadataTestJSON-2115374873-project-member] Creating folder: Project (726ff8ac2fef439eb92c8e16f6365f78). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1514.630397] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-44ab884f-ada0-4203-8d70-e19aee0b4b01 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.641760] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Created folder: Project (726ff8ac2fef439eb92c8e16f6365f78) in parent group-v581821. [ 1514.641944] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Creating folder: Instances. Parent ref: group-v582155. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1514.642233] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca267b06-655e-40cc-80b8-ffcd1fc02950 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.652189] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Created folder: Instances in parent group-v582155. [ 1514.652189] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1514.652362] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1514.652562] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0731644f-2a6c-4327-9e4f-f9a850c8ad00 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.671504] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1514.671504] env[69992]: value = "task-2898254" [ 1514.671504] env[69992]: _type = "Task" [ 1514.671504] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.678788] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898254, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.918991] env[69992]: DEBUG nova.compute.manager [req-d48e0c9a-56a6-4a84-a05a-9890ab0425ff req-0d1c34c0-3db5-4921-bf2c-5ac14da0ebca service nova] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Received event network-changed-bd85f0c5-e04c-4955-b2f0-952380a45baa {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1514.919274] env[69992]: DEBUG nova.compute.manager [req-d48e0c9a-56a6-4a84-a05a-9890ab0425ff req-0d1c34c0-3db5-4921-bf2c-5ac14da0ebca service nova] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Refreshing instance network info cache due to event network-changed-bd85f0c5-e04c-4955-b2f0-952380a45baa. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1514.919467] env[69992]: DEBUG oslo_concurrency.lockutils [req-d48e0c9a-56a6-4a84-a05a-9890ab0425ff req-0d1c34c0-3db5-4921-bf2c-5ac14da0ebca service nova] Acquiring lock "refresh_cache-e7539d23-b4bb-48e8-89f4-ba98e6a12a01" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1514.919507] env[69992]: DEBUG oslo_concurrency.lockutils [req-d48e0c9a-56a6-4a84-a05a-9890ab0425ff req-0d1c34c0-3db5-4921-bf2c-5ac14da0ebca service nova] Acquired lock "refresh_cache-e7539d23-b4bb-48e8-89f4-ba98e6a12a01" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1514.919795] env[69992]: DEBUG nova.network.neutron [req-d48e0c9a-56a6-4a84-a05a-9890ab0425ff req-0d1c34c0-3db5-4921-bf2c-5ac14da0ebca service nova] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Refreshing network info cache for port bd85f0c5-e04c-4955-b2f0-952380a45baa {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1514.936131] env[69992]: DEBUG oslo_vmware.api [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898251, 'name': ReconfigVM_Task, 'duration_secs': 0.167616} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.936394] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Reconfigured VM instance instance-00000079 to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1514.937183] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e04effc1-4c2a-44d0-ac43-df9fc8e5d0b6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.940636] env[69992]: DEBUG nova.compute.manager [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1514.965644] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] volume-b76a65d3-3712-42c5-a9d4-c35d2046ba1f/volume-b76a65d3-3712-42c5-a9d4-c35d2046ba1f.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1514.968236] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba3d0942-1de1-46a8-956a-4cc0b8e877e4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.986357] env[69992]: DEBUG oslo_vmware.api [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1514.986357] env[69992]: value = "task-2898255" [ 1514.986357] env[69992]: _type = "Task" [ 1514.986357] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.988467] env[69992]: DEBUG nova.virt.hardware [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=<?>,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-10T17:43:46Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1514.988698] env[69992]: DEBUG nova.virt.hardware [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1514.988856] env[69992]: DEBUG nova.virt.hardware [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1514.989057] env[69992]: DEBUG nova.virt.hardware [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1514.989212] env[69992]: DEBUG nova.virt.hardware [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 
tempest-AttachVolumeNegativeTest-821716976-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1514.989365] env[69992]: DEBUG nova.virt.hardware [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1514.989571] env[69992]: DEBUG nova.virt.hardware [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1514.989731] env[69992]: DEBUG nova.virt.hardware [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1514.989897] env[69992]: DEBUG nova.virt.hardware [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1514.990080] env[69992]: DEBUG nova.virt.hardware [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1514.990259] env[69992]: DEBUG nova.virt.hardware [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1514.991128] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fe848e5-88e2-42aa-9a65-cf43cecda004 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.004329] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff5f70f8-81bf-46b7-aeef-484e486b416f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.009550] env[69992]: DEBUG oslo_vmware.api [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898255, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.112871] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1515.113284] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1515.113518] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1515.113699] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69992) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1515.114670] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b934ee1-1b49-4a5e-882a-6b15920985e8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.122655] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f23431f-a878-4ef2-b6be-c04bbf5019ec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.135878] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228c543f-3a27-4c75-8e8d-3b22aca75679 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.141874] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ea93bc-c55d-4344-b0a1-651dc522fddc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.172215] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180239MB free_disk=161GB free_vcpus=48 pci_devices=None {{(pid=69992) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1515.172469] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1515.172765] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1515.182344] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898254, 'name': CreateVM_Task, 'duration_secs': 0.309704} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.182657] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1515.183353] env[69992]: DEBUG oslo_concurrency.lockutils [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1515.183583] env[69992]: DEBUG oslo_concurrency.lockutils [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1515.183956] env[69992]: DEBUG oslo_concurrency.lockutils [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1515.184271] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc65f7c9-c490-4c2b-b2df-5a6ad938e212 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.188713] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Waiting for the task: (returnval){ [ 1515.188713] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]523af403-8e3b-b749-02f1-cfcb5fc98689" [ 1515.188713] env[69992]: _type = "Task" [ 1515.188713] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.196425] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523af403-8e3b-b749-02f1-cfcb5fc98689, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.247771] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4db5022-03ef-4211-bb0a-5ec2a0e0dc4f tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "90facf1a-ae81-4259-bf75-94779267699c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1515.248058] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4db5022-03ef-4211-bb0a-5ec2a0e0dc4f tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "90facf1a-ae81-4259-bf75-94779267699c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1515.248266] env[69992]: DEBUG nova.compute.manager [None req-f4db5022-03ef-4211-bb0a-5ec2a0e0dc4f tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1515.249147] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d43282-9030-4e33-9cd7-ec160e6022d1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.256219] env[69992]: DEBUG nova.compute.manager [None req-f4db5022-03ef-4211-bb0a-5ec2a0e0dc4f tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69992) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1515.256763] env[69992]: DEBUG nova.objects.instance [None req-f4db5022-03ef-4211-bb0a-5ec2a0e0dc4f tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lazy-loading 'flavor' on Instance uuid 90facf1a-ae81-4259-bf75-94779267699c {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1515.433115] env[69992]: DEBUG nova.network.neutron [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Successfully updated port: ba5a70da-7de9-4267-a3cf-1cdbabc9aa22 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1515.499814] env[69992]: DEBUG oslo_vmware.api [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898255, 'name': ReconfigVM_Task, 'duration_secs': 0.25267} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.502145] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Reconfigured VM instance instance-00000079 to attach disk [datastore1] volume-b76a65d3-3712-42c5-a9d4-c35d2046ba1f/volume-b76a65d3-3712-42c5-a9d4-c35d2046ba1f.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1515.502425] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Updating instance '7b549cd4-bfdc-45c5-9031-9b378ad7ee79' progress to 50 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1515.632298] env[69992]: DEBUG nova.network.neutron [req-d48e0c9a-56a6-4a84-a05a-9890ab0425ff req-0d1c34c0-3db5-4921-bf2c-5ac14da0ebca service nova] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Updated VIF entry in instance network info cache for port bd85f0c5-e04c-4955-b2f0-952380a45baa. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1515.632659] env[69992]: DEBUG nova.network.neutron [req-d48e0c9a-56a6-4a84-a05a-9890ab0425ff req-0d1c34c0-3db5-4921-bf2c-5ac14da0ebca service nova] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Updating instance_info_cache with network_info: [{"id": "bd85f0c5-e04c-4955-b2f0-952380a45baa", "address": "fa:16:3e:0d:e7:09", "network": {"id": "9ddf4a94-71d3-42d5-b6a8-acbf9e8f481b", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1760716510-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "726ff8ac2fef439eb92c8e16f6365f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db00ec2e-3155-46b6-8170-082f7d86dbe7", "external-id": "nsx-vlan-transportzone-332", "segmentation_id": 332, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd85f0c5-e0", "ovs_interfaceid": "bd85f0c5-e04c-4955-b2f0-952380a45baa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1515.699021] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523af403-8e3b-b749-02f1-cfcb5fc98689, 'name': SearchDatastore_Task, 'duration_secs': 0.011717} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.699320] env[69992]: DEBUG oslo_concurrency.lockutils [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1515.699561] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1515.699792] env[69992]: DEBUG oslo_concurrency.lockutils [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1515.699938] env[69992]: DEBUG oslo_concurrency.lockutils [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1515.700131] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1515.700378] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-340879a2-8c27-41a1-931e-8748ca6b158c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.708944] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1515.709128] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1515.709782] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d6369bc-3326-4af5-8229-f5a71f24debd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.714917] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Waiting for the task: (returnval){ [ 1515.714917] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]524f3094-e8c0-9764-4f29-6443582468c1" [ 1515.714917] env[69992]: _type = "Task" [ 1515.714917] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.721711] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524f3094-e8c0-9764-4f29-6443582468c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.932750] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "refresh_cache-6df2b739-02c0-40ac-b2a2-14587e3996bf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1515.933020] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquired lock "refresh_cache-6df2b739-02c0-40ac-b2a2-14587e3996bf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1515.933020] env[69992]: DEBUG nova.network.neutron [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1516.009457] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263f0f44-4515-4b0d-8ac0-8bc8870cb4a1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.030511] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415c1f7e-0f20-46f7-9d4a-4849e46cacc2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.047471] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Updating instance '7b549cd4-bfdc-45c5-9031-9b378ad7ee79' progress to 67 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1516.135012] env[69992]: DEBUG oslo_concurrency.lockutils [req-d48e0c9a-56a6-4a84-a05a-9890ab0425ff 
req-0d1c34c0-3db5-4921-bf2c-5ac14da0ebca service nova] Releasing lock "refresh_cache-e7539d23-b4bb-48e8-89f4-ba98e6a12a01" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1516.183444] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Applying migration context for instance 7b549cd4-bfdc-45c5-9031-9b378ad7ee79 as it has an incoming, in-progress migration 685133dc-8ed1-4552-b3aa-faf26d22ebb4. Migration status is migrating {{(pid=69992) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1516.184433] env[69992]: INFO nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Updating resource usage from migration 685133dc-8ed1-4552-b3aa-faf26d22ebb4 [ 1516.203804] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1516.203950] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1516.204088] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 90facf1a-ae81-4259-bf75-94779267699c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1516.204211] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Migration 685133dc-8ed1-4552-b3aa-faf26d22ebb4 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1516.204331] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 7b549cd4-bfdc-45c5-9031-9b378ad7ee79 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1516.204443] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance e7539d23-b4bb-48e8-89f4-ba98e6a12a01 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1516.204556] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 6df2b739-02c0-40ac-b2a2-14587e3996bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1516.204734] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1516.204867] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1920MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1516.220449] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Refreshing inventories for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1516.228110] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524f3094-e8c0-9764-4f29-6443582468c1, 'name': SearchDatastore_Task, 'duration_secs': 0.008551} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.228863] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60c1ef6b-5c9a-47ce-9788-2ca45996a6c8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.232107] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Updating ProviderTree inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1516.232282] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1516.235196] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Waiting for the task: (returnval){ [ 1516.235196] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52f5384e-3169-ab92-ba67-feceec2b2e4d" [ 1516.235196] env[69992]: _type = "Task" [ 1516.235196] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.242325] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52f5384e-3169-ab92-ba67-feceec2b2e4d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.243052] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Refreshing aggregate associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, aggregates: None {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1516.259736] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Refreshing trait associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1516.262921] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4db5022-03ef-4211-bb0a-5ec2a0e0dc4f tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1516.263172] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dab8e583-bd6e-4138-aa0d-926ea261e845 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.270463] env[69992]: DEBUG oslo_vmware.api [None req-f4db5022-03ef-4211-bb0a-5ec2a0e0dc4f tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1516.270463] env[69992]: value = "task-2898256" [ 1516.270463] env[69992]: _type = "Task" [ 1516.270463] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.280011] env[69992]: DEBUG oslo_vmware.api [None req-f4db5022-03ef-4211-bb0a-5ec2a0e0dc4f tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898256, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.352199] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d75abce-2a4b-47df-8db3-60b4b4c8e5f9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.359788] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7477a2b-30e1-45cc-8f32-564d8aeea613 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.390190] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ce55ef-1d0f-4635-b928-deadec47bb35 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.397510] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee03c663-9559-4080-a52a-5a45fa3580c0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.411481] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1516.465591] env[69992]: DEBUG nova.network.neutron [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Instance cache missing network info. {{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1516.594153] env[69992]: DEBUG nova.network.neutron [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Updating instance_info_cache with network_info: [{"id": "ba5a70da-7de9-4267-a3cf-1cdbabc9aa22", "address": "fa:16:3e:4f:a5:1f", "network": {"id": "7b76ab15-e15a-4e22-ba38-bffeba7c4ff6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1461551189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4da04b8933ad4d2ba4b1c193853f31b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba5a70da-7d", "ovs_interfaceid": "ba5a70da-7de9-4267-a3cf-1cdbabc9aa22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1516.745595] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e 
tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52f5384e-3169-ab92-ba67-feceec2b2e4d, 'name': SearchDatastore_Task, 'duration_secs': 0.009741} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.745845] env[69992]: DEBUG oslo_concurrency.lockutils [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1516.746122] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] e7539d23-b4bb-48e8-89f4-ba98e6a12a01/e7539d23-b4bb-48e8-89f4-ba98e6a12a01.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1516.746376] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d87ef823-b9c1-4dda-ae96-fdd3e1aea8be {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.752508] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Waiting for the task: (returnval){ [ 1516.752508] env[69992]: value = "task-2898257" [ 1516.752508] env[69992]: _type = "Task" [ 1516.752508] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.759387] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': task-2898257, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.778705] env[69992]: DEBUG oslo_vmware.api [None req-f4db5022-03ef-4211-bb0a-5ec2a0e0dc4f tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898256, 'name': PowerOffVM_Task, 'duration_secs': 0.212425} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.778908] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4db5022-03ef-4211-bb0a-5ec2a0e0dc4f tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1516.779230] env[69992]: DEBUG nova.compute.manager [None req-f4db5022-03ef-4211-bb0a-5ec2a0e0dc4f tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1516.779938] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d9f7292-3753-4788-91aa-6aa65d5be27b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.915437] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1516.951097] env[69992]: DEBUG nova.compute.manager [req-af7980de-28c1-4f11-8664-fd5af1f55160 req-489da097-b0b4-4a5d-b13e-c3a6a1f8548c service nova] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Received event network-vif-plugged-ba5a70da-7de9-4267-a3cf-1cdbabc9aa22 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1516.951353] env[69992]: DEBUG oslo_concurrency.lockutils [req-af7980de-28c1-4f11-8664-fd5af1f55160 req-489da097-b0b4-4a5d-b13e-c3a6a1f8548c service nova] Acquiring lock "6df2b739-02c0-40ac-b2a2-14587e3996bf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1516.951541] env[69992]: DEBUG oslo_concurrency.lockutils [req-af7980de-28c1-4f11-8664-fd5af1f55160 req-489da097-b0b4-4a5d-b13e-c3a6a1f8548c service nova] Lock "6df2b739-02c0-40ac-b2a2-14587e3996bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1516.951645] env[69992]: DEBUG oslo_concurrency.lockutils [req-af7980de-28c1-4f11-8664-fd5af1f55160 req-489da097-b0b4-4a5d-b13e-c3a6a1f8548c service nova] Lock "6df2b739-02c0-40ac-b2a2-14587e3996bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1516.951839] env[69992]: DEBUG nova.compute.manager [req-af7980de-28c1-4f11-8664-fd5af1f55160 req-489da097-b0b4-4a5d-b13e-c3a6a1f8548c service nova] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] No waiting events found dispatching 
network-vif-plugged-ba5a70da-7de9-4267-a3cf-1cdbabc9aa22 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1516.951984] env[69992]: WARNING nova.compute.manager [req-af7980de-28c1-4f11-8664-fd5af1f55160 req-489da097-b0b4-4a5d-b13e-c3a6a1f8548c service nova] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Received unexpected event network-vif-plugged-ba5a70da-7de9-4267-a3cf-1cdbabc9aa22 for instance with vm_state building and task_state spawning. [ 1516.952158] env[69992]: DEBUG nova.compute.manager [req-af7980de-28c1-4f11-8664-fd5af1f55160 req-489da097-b0b4-4a5d-b13e-c3a6a1f8548c service nova] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Received event network-changed-ba5a70da-7de9-4267-a3cf-1cdbabc9aa22 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1516.952313] env[69992]: DEBUG nova.compute.manager [req-af7980de-28c1-4f11-8664-fd5af1f55160 req-489da097-b0b4-4a5d-b13e-c3a6a1f8548c service nova] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Refreshing instance network info cache due to event network-changed-ba5a70da-7de9-4267-a3cf-1cdbabc9aa22. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1516.952483] env[69992]: DEBUG oslo_concurrency.lockutils [req-af7980de-28c1-4f11-8664-fd5af1f55160 req-489da097-b0b4-4a5d-b13e-c3a6a1f8548c service nova] Acquiring lock "refresh_cache-6df2b739-02c0-40ac-b2a2-14587e3996bf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1517.097067] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Releasing lock "refresh_cache-6df2b739-02c0-40ac-b2a2-14587e3996bf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1517.097404] env[69992]: DEBUG nova.compute.manager [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Instance network_info: |[{"id": "ba5a70da-7de9-4267-a3cf-1cdbabc9aa22", "address": "fa:16:3e:4f:a5:1f", "network": {"id": "7b76ab15-e15a-4e22-ba38-bffeba7c4ff6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1461551189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4da04b8933ad4d2ba4b1c193853f31b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba5a70da-7d", "ovs_interfaceid": "ba5a70da-7de9-4267-a3cf-1cdbabc9aa22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1517.097967] env[69992]: DEBUG oslo_concurrency.lockutils 
[req-af7980de-28c1-4f11-8664-fd5af1f55160 req-489da097-b0b4-4a5d-b13e-c3a6a1f8548c service nova] Acquired lock "refresh_cache-6df2b739-02c0-40ac-b2a2-14587e3996bf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1517.098191] env[69992]: DEBUG nova.network.neutron [req-af7980de-28c1-4f11-8664-fd5af1f55160 req-489da097-b0b4-4a5d-b13e-c3a6a1f8548c service nova] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Refreshing network info cache for port ba5a70da-7de9-4267-a3cf-1cdbabc9aa22 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1517.099392] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:a5:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7b83383f-ed7a-4efd-aef7-aa8c15649d07', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ba5a70da-7de9-4267-a3cf-1cdbabc9aa22', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1517.106992] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1517.108239] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1517.108480] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6b4e2e55-481e-4923-b0ad-e95b8bcc8471 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.129834] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1517.129834] env[69992]: value = "task-2898258" [ 1517.129834] env[69992]: _type = "Task" [ 1517.129834] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.138800] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898258, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.264082] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': task-2898257, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.45887} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.264352] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] e7539d23-b4bb-48e8-89f4-ba98e6a12a01/e7539d23-b4bb-48e8-89f4-ba98e6a12a01.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1517.264564] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1517.264807] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-89b930f8-e1da-4177-a8bc-61f84290f7dd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.271364] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Waiting for the task: (returnval){ [ 1517.271364] env[69992]: value = "task-2898259" [ 1517.271364] env[69992]: _type = "Task" [ 1517.271364] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.278931] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': task-2898259, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.289607] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4db5022-03ef-4211-bb0a-5ec2a0e0dc4f tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "90facf1a-ae81-4259-bf75-94779267699c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.041s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1517.421902] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69992) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1517.422118] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.249s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1517.586461] env[69992]: DEBUG nova.objects.instance [None req-18140214-77ae-4032-a7ad-78b2deeef9f4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lazy-loading 'flavor' on Instance uuid 90facf1a-ae81-4259-bf75-94779267699c {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1517.640532] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898258, 'name': CreateVM_Task, 'duration_secs': 0.452989} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.640532] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1517.641667] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1517.641667] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1517.641667] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1517.641871] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be734a5a-7096-4539-a2c0-df1ec5bd7218 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1517.647382] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1517.647382] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52844dbb-5166-939f-71c2-19f22fafdbda" [ 1517.647382] env[69992]: _type = "Task" [ 1517.647382] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.657022] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52844dbb-5166-939f-71c2-19f22fafdbda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.737785] env[69992]: DEBUG nova.network.neutron [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Port 3319610f-82c5-4e8a-85bc-ec2d73b68ebe binding to destination host cpu-1 is already ACTIVE {{(pid=69992) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1517.780852] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': task-2898259, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072352} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.781206] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1517.781987] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70c1c12-3029-4a94-8656-2cbe4ef51099 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.806034] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] e7539d23-b4bb-48e8-89f4-ba98e6a12a01/e7539d23-b4bb-48e8-89f4-ba98e6a12a01.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1517.806263] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-203d6833-e9f9-4724-86e4-67131265bf27 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.824936] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Waiting for the task: (returnval){ [ 1517.824936] env[69992]: value = "task-2898260" [ 1517.824936] env[69992]: 
_type = "Task" [ 1517.824936] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.828616] env[69992]: DEBUG nova.network.neutron [req-af7980de-28c1-4f11-8664-fd5af1f55160 req-489da097-b0b4-4a5d-b13e-c3a6a1f8548c service nova] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Updated VIF entry in instance network info cache for port ba5a70da-7de9-4267-a3cf-1cdbabc9aa22. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1517.828980] env[69992]: DEBUG nova.network.neutron [req-af7980de-28c1-4f11-8664-fd5af1f55160 req-489da097-b0b4-4a5d-b13e-c3a6a1f8548c service nova] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Updating instance_info_cache with network_info: [{"id": "ba5a70da-7de9-4267-a3cf-1cdbabc9aa22", "address": "fa:16:3e:4f:a5:1f", "network": {"id": "7b76ab15-e15a-4e22-ba38-bffeba7c4ff6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1461551189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4da04b8933ad4d2ba4b1c193853f31b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba5a70da-7d", "ovs_interfaceid": "ba5a70da-7de9-4267-a3cf-1cdbabc9aa22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1517.835329] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': task-2898260, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.092751] env[69992]: DEBUG oslo_concurrency.lockutils [None req-18140214-77ae-4032-a7ad-78b2deeef9f4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.093153] env[69992]: DEBUG oslo_concurrency.lockutils [None req-18140214-77ae-4032-a7ad-78b2deeef9f4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1518.093153] env[69992]: DEBUG nova.network.neutron [None req-18140214-77ae-4032-a7ad-78b2deeef9f4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1518.093234] env[69992]: DEBUG nova.objects.instance [None req-18140214-77ae-4032-a7ad-78b2deeef9f4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lazy-loading 'info_cache' on Instance uuid 90facf1a-ae81-4259-bf75-94779267699c {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1518.158327] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52844dbb-5166-939f-71c2-19f22fafdbda, 'name': SearchDatastore_Task, 'duration_secs': 0.05876} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.158653] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1518.158871] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1518.159120] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.159269] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1518.159480] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1518.159741] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e697870-64b4-4ead-8035-e381d1eae6d9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.167868] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1518.168053] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1518.168731] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f081f1b1-91a3-4e39-96de-26a315b3f614 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.173386] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1518.173386] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52bae91d-8eb5-ea5d-3519-af2600b5a66c" [ 1518.173386] env[69992]: _type = "Task" [ 1518.173386] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.180676] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52bae91d-8eb5-ea5d-3519-af2600b5a66c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.334824] env[69992]: DEBUG oslo_concurrency.lockutils [req-af7980de-28c1-4f11-8664-fd5af1f55160 req-489da097-b0b4-4a5d-b13e-c3a6a1f8548c service nova] Releasing lock "refresh_cache-6df2b739-02c0-40ac-b2a2-14587e3996bf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1518.335226] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': task-2898260, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.423202] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1518.423443] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1518.423681] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] CONF.reclaim_instance_interval <= 0, skipping... 
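The repeated "progress is N%" entries followed by "completed successfully" come from oslo.vmware's wait_for_task()/_poll_task() loop (api.py:397/434/444 in the trailers). The following is a simplified, hypothetical sketch of that polling pattern, not oslo.vmware's actual implementation or signature:

```python
import time

def wait_for_task(poll_task, interval=0.5, timeout=60.0):
    """poll_task() -> (state, progress); state is 'running', 'success' or 'error'."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = poll_task()
        print("progress is %s%%" % progress)   # mirrors the repeated DEBUG polling lines
        if state == "success":
            return                             # "... completed successfully."
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(interval)                   # oslo.vmware polls on a loopingcall interval
    raise TimeoutError("task did not complete in time")
```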
{{(pid=69992) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1518.597020] env[69992]: DEBUG nova.objects.base [None req-18140214-77ae-4032-a7ad-78b2deeef9f4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Object Instance<90facf1a-ae81-4259-bf75-94779267699c> lazy-loaded attributes: flavor,info_cache {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1518.684366] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52bae91d-8eb5-ea5d-3519-af2600b5a66c, 'name': SearchDatastore_Task, 'duration_secs': 0.008394} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.685116] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-477f5bd5-533d-4127-bcb9-314ed555c92c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.689927] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1518.689927] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]523a23b6-f476-960f-8091-c0a60bb0d2fb" [ 1518.689927] env[69992]: _type = "Task" [ 1518.689927] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.697092] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523a23b6-f476-960f-8091-c0a60bb0d2fb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.758152] env[69992]: DEBUG oslo_concurrency.lockutils [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "7b549cd4-bfdc-45c5-9031-9b378ad7ee79-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1518.758639] env[69992]: DEBUG oslo_concurrency.lockutils [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "7b549cd4-bfdc-45c5-9031-9b378ad7ee79-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1518.758639] env[69992]: DEBUG oslo_concurrency.lockutils [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "7b549cd4-bfdc-45c5-9031-9b378ad7ee79-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1518.834617] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': task-2898260, 'name': ReconfigVM_Task, 'duration_secs': 0.78245} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.834895] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Reconfigured VM instance instance-0000007a to attach disk [datastore1] e7539d23-b4bb-48e8-89f4-ba98e6a12a01/e7539d23-b4bb-48e8-89f4-ba98e6a12a01.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1518.835621] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-73cd7f11-0d6e-4a7e-8aa2-7b36b85d6ce9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.841749] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Waiting for the task: (returnval){ [ 1518.841749] env[69992]: value = "task-2898261" [ 1518.841749] env[69992]: _type = "Task" [ 1518.841749] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.849426] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': task-2898261, 'name': Rename_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.200138] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523a23b6-f476-960f-8091-c0a60bb0d2fb, 'name': SearchDatastore_Task, 'duration_secs': 0.010281} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.200429] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1519.200592] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 6df2b739-02c0-40ac-b2a2-14587e3996bf/6df2b739-02c0-40ac-b2a2-14587e3996bf.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1519.200843] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-499c5061-3225-4d9a-8f06-bb167774a4e7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.208025] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1519.208025] env[69992]: value = "task-2898262" [ 1519.208025] env[69992]: _type = "Task" [ 1519.208025] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.215290] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898262, 'name': CopyVirtualDisk_Task} progress is 0%. 
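The CopyVirtualDisk_Task above clones the cached image VMDK from devstack-image-cache_base into the instance's own directory on the same datastore. The path convention visible in the log can be reproduced with two trivial helpers (helper names are illustrative, not Nova's):

```python
def cached_image_vmdk(datastore, image_id):
    # e.g. "[datastore1] devstack-image-cache_base/<image>/<image>.vmdk"
    return "[%s] devstack-image-cache_base/%s/%s.vmdk" % (datastore, image_id, image_id)

def instance_root_vmdk(datastore, instance_uuid):
    # e.g. "[datastore1] <uuid>/<uuid>.vmdk"
    return "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

src = cached_image_vmdk("datastore1", "eb50549f-9db8-4c15-a738-0e4b1e9e33fb")
dst = instance_root_vmdk("datastore1", "6df2b739-02c0-40ac-b2a2-14587e3996bf")
```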
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.294157] env[69992]: DEBUG nova.network.neutron [None req-18140214-77ae-4032-a7ad-78b2deeef9f4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Updating instance_info_cache with network_info: [{"id": "0ec50d92-4ea0-44af-b9fd-14443de36a12", "address": "fa:16:3e:1a:4d:23", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ec50d92-4e", "ovs_interfaceid": "0ec50d92-4ea0-44af-b9fd-14443de36a12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1519.352233] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': task-2898261, 'name': Rename_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.717475] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898262, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.465398} completed successfully. 
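The instance_info_cache update above stores each Neutron VIF as a dict carrying the port id, MAC, and nested subnet/IP data. A self-contained sketch of pulling the fixed and floating addresses out of an entry shaped like the one logged above (structure trimmed from that entry; the helper is illustrative):

```python
def addresses(network_info):
    """Yield (fixed_ip, [floating_ips]) pairs from a network_info list."""
    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                yield ip["address"], [f["address"] for f in ip.get("floating_ips", [])]

# Shape taken from the cache entry logged above (heavily trimmed).
network_info = [{
    "id": "0ec50d92-4ea0-44af-b9fd-14443de36a12",
    "address": "fa:16:3e:1a:4d:23",
    "network": {"subnets": [{"ips": [{"address": "192.168.128.11",
                                      "floating_ips": [{"address": "10.180.180.168"}]}]}]},
}]
print(list(addresses(network_info)))  # [('192.168.128.11', ['10.180.180.168'])]
```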
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.717739] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 6df2b739-02c0-40ac-b2a2-14587e3996bf/6df2b739-02c0-40ac-b2a2-14587e3996bf.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1519.718045] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1519.718331] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e57a0862-2cb6-4d2d-879d-983020d77d46 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.725649] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1519.725649] env[69992]: value = "task-2898263" [ 1519.725649] env[69992]: _type = "Task" [ 1519.725649] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.733975] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898263, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.796068] env[69992]: DEBUG oslo_concurrency.lockutils [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "refresh_cache-7b549cd4-bfdc-45c5-9031-9b378ad7ee79" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1519.796303] env[69992]: DEBUG oslo_concurrency.lockutils [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired lock "refresh_cache-7b549cd4-bfdc-45c5-9031-9b378ad7ee79" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1519.796518] env[69992]: DEBUG nova.network.neutron [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1519.797899] env[69992]: DEBUG oslo_concurrency.lockutils [None req-18140214-77ae-4032-a7ad-78b2deeef9f4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1519.852507] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': task-2898261, 'name': Rename_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.236055] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898263, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063757} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.236055] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1520.236672] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4480a96d-6498-48df-a363-2332c01a215d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.257983] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 6df2b739-02c0-40ac-b2a2-14587e3996bf/6df2b739-02c0-40ac-b2a2-14587e3996bf.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1520.258240] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80bb6670-7d51-4ac9-9b46-c11f6a7962b0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.277904] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1520.277904] env[69992]: value = "task-2898264" [ 1520.277904] env[69992]: _type = "Task" [ 1520.277904] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.285207] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898264, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.351676] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': task-2898261, 'name': Rename_Task, 'duration_secs': 1.203134} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.351951] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1520.352209] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ace26fcf-1cca-4b60-a5ac-b36eb2991cc7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.358808] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Waiting for the task: (returnval){ [ 1520.358808] env[69992]: value = "task-2898265" [ 1520.358808] env[69992]: _type = "Task" [ 1520.358808] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.370371] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': task-2898265, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.508604] env[69992]: DEBUG nova.network.neutron [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Updating instance_info_cache with network_info: [{"id": "3319610f-82c5-4e8a-85bc-ec2d73b68ebe", "address": "fa:16:3e:ed:37:96", "network": {"id": "107a28a2-7647-4953-abac-1246b892511f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1382825212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ab89c6cf054418a4dd1a0e61b3a5e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3319610f-82", "ovs_interfaceid": "3319610f-82c5-4e8a-85bc-ec2d73b68ebe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1520.789366] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898264, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.804429] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-18140214-77ae-4032-a7ad-78b2deeef9f4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1520.804758] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b9746162-190b-4d22-b3ba-d5b4acecbfae {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.811872] env[69992]: DEBUG oslo_vmware.api [None req-18140214-77ae-4032-a7ad-78b2deeef9f4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1520.811872] env[69992]: value = "task-2898266" [ 1520.811872] env[69992]: _type = "Task" [ 1520.811872] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.820136] env[69992]: DEBUG oslo_vmware.api [None req-18140214-77ae-4032-a7ad-78b2deeef9f4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898266, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.870411] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': task-2898265, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.012108] env[69992]: DEBUG oslo_concurrency.lockutils [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Releasing lock "refresh_cache-7b549cd4-bfdc-45c5-9031-9b378ad7ee79" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1521.288059] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898264, 'name': ReconfigVM_Task, 'duration_secs': 0.610004} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.288433] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 6df2b739-02c0-40ac-b2a2-14587e3996bf/6df2b739-02c0-40ac-b2a2-14587e3996bf.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1521.288968] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c07d70ab-e11f-462c-9c11-89c47897a9b7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.295687] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1521.295687] env[69992]: value = "task-2898267" [ 1521.295687] env[69992]: _type = "Task" [ 1521.295687] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.305545] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898267, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.321345] env[69992]: DEBUG oslo_vmware.api [None req-18140214-77ae-4032-a7ad-78b2deeef9f4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898266, 'name': PowerOnVM_Task, 'duration_secs': 0.434659} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.321590] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-18140214-77ae-4032-a7ad-78b2deeef9f4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1521.321791] env[69992]: DEBUG nova.compute.manager [None req-18140214-77ae-4032-a7ad-78b2deeef9f4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1521.322548] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e5615f-71a5-4102-be14-b9087a103b65 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.368965] env[69992]: DEBUG oslo_vmware.api [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': task-2898265, 'name': PowerOnVM_Task, 'duration_secs': 0.675125} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.369292] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1521.369597] env[69992]: INFO nova.compute.manager [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Took 8.62 seconds to spawn the instance on the hypervisor. [ 1521.369826] env[69992]: DEBUG nova.compute.manager [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1521.370573] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee4c566-c203-4040-98a3-be1007b0f91c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.520744] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b3ec58-7f34-442f-ba64-90b8e3bc8586 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.527469] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed385c73-ed86-4f01-a30a-f943b518f04d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.808806] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898267, 'name': Rename_Task, 'duration_secs': 0.44879} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.809211] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1521.809560] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-04eedce7-8aff-4264-bbca-b50b33dda3e1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.816332] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1521.816332] env[69992]: value = "task-2898268" [ 1521.816332] env[69992]: _type = "Task" [ 1521.816332] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.826219] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898268, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.887785] env[69992]: INFO nova.compute.manager [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Took 14.60 seconds to build instance. [ 1522.326960] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898268, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.389372] env[69992]: DEBUG oslo_concurrency.lockutils [None req-658a3261-0ba8-4fc7-91ac-d368bf5e246e tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Lock "e7539d23-b4bb-48e8-89f4-ba98e6a12a01" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.107s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1522.630400] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-498393b4-8d3e-461c-8d26-5b45c3f73ef4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.649688] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd7a8e9f-aa97-4f90-a428-73698f7af0ed {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.656706] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Updating instance '7b549cd4-bfdc-45c5-9031-9b378ad7ee79' progress to 83 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1522.826756] env[69992]: DEBUG oslo_vmware.api [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898268, 'name': PowerOnVM_Task, 'duration_secs': 0.833793} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.827074] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1522.827301] env[69992]: INFO nova.compute.manager [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Took 7.89 seconds to spawn the instance on the hypervisor. [ 1522.827486] env[69992]: DEBUG nova.compute.manager [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1522.828286] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e214a69d-f1b5-4b2c-8383-cf235744507d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.163517] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1523.163752] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd4c5dda-03c0-454f-a3d8-bd4aa929b503 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.171426] env[69992]: DEBUG oslo_vmware.api [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1523.171426] env[69992]: value = "task-2898269" [ 1523.171426] env[69992]: _type = "Task" [ 1523.171426] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.179197] env[69992]: DEBUG oslo_vmware.api [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898269, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.343875] env[69992]: INFO nova.compute.manager [None req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Took 14.01 seconds to build instance. [ 1523.685204] env[69992]: DEBUG oslo_vmware.api [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898269, 'name': PowerOnVM_Task, 'duration_secs': 0.395142} completed successfully. 
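Taken together, the entries for instance 6df2b739-02c0-40ac-b2a2-14587e3996bf trace the usual VMware spawn sequence: copy the cached image VMDK, extend the root disk, ReconfigVM to attach it, Rename, then PowerOnVM. A compressed sketch of that ordering, with each step passed in as a callable so the snippet stays self-contained (step names are illustrative, not Nova's method names):

```python
def spawn_from_cached_image(steps):
    """Run the spawn steps in the order the log shows; each value is a callable."""
    order = ("copy_virtual_disk",   # CopyVirtualDisk_Task
             "extend_root_disk",    # ExtendVirtualDisk_Task
             "attach_disk",         # ReconfigVM_Task ("attach disk ... with type sparse")
             "rename_vm",           # Rename_Task
             "power_on")            # PowerOnVM_Task
    for name in order:
        steps[name]()

# No-op wiring just to show the call order:
spawn_from_cached_image({n: (lambda n=n: print(n)) for n in
                         ("copy_virtual_disk", "extend_root_disk",
                          "attach_disk", "rename_vm", "power_on")})
```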
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.685204] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1523.685204] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-604d995b-f773-42b3-aef5-af5952d667fd tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Updating instance '7b549cd4-bfdc-45c5-9031-9b378ad7ee79' progress to 100 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1523.703891] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Acquiring lock "e7539d23-b4bb-48e8-89f4-ba98e6a12a01" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1523.703891] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Lock "e7539d23-b4bb-48e8-89f4-ba98e6a12a01" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1523.703891] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Acquiring lock "e7539d23-b4bb-48e8-89f4-ba98e6a12a01-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1523.704195] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Lock "e7539d23-b4bb-48e8-89f4-ba98e6a12a01-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1523.705023] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Lock "e7539d23-b4bb-48e8-89f4-ba98e6a12a01-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1523.706500] env[69992]: INFO nova.compute.manager [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Terminating instance [ 1523.845841] env[69992]: DEBUG oslo_concurrency.lockutils [None 
req-ed066ed4-dd46-4c82-8281-ec9fc3381257 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "6df2b739-02c0-40ac-b2a2-14587e3996bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.525s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1523.870180] env[69992]: DEBUG nova.compute.manager [req-60d52743-4c9a-4a5c-b17e-285cce63f572 req-9157be1e-fe52-40d9-a83e-39e79df07bb5 service nova] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Received event network-changed-ba5a70da-7de9-4267-a3cf-1cdbabc9aa22 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1523.870501] env[69992]: DEBUG nova.compute.manager [req-60d52743-4c9a-4a5c-b17e-285cce63f572 req-9157be1e-fe52-40d9-a83e-39e79df07bb5 service nova] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Refreshing instance network info cache due to event network-changed-ba5a70da-7de9-4267-a3cf-1cdbabc9aa22. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1523.870798] env[69992]: DEBUG oslo_concurrency.lockutils [req-60d52743-4c9a-4a5c-b17e-285cce63f572 req-9157be1e-fe52-40d9-a83e-39e79df07bb5 service nova] Acquiring lock "refresh_cache-6df2b739-02c0-40ac-b2a2-14587e3996bf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1523.871014] env[69992]: DEBUG oslo_concurrency.lockutils [req-60d52743-4c9a-4a5c-b17e-285cce63f572 req-9157be1e-fe52-40d9-a83e-39e79df07bb5 service nova] Acquired lock "refresh_cache-6df2b739-02c0-40ac-b2a2-14587e3996bf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1523.871262] env[69992]: DEBUG nova.network.neutron [req-60d52743-4c9a-4a5c-b17e-285cce63f572 req-9157be1e-fe52-40d9-a83e-39e79df07bb5 service nova] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Refreshing network info cache for port ba5a70da-7de9-4267-a3cf-1cdbabc9aa22 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1524.210213] env[69992]: DEBUG nova.compute.manager [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1524.210774] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1524.211712] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6025c7f8-a74a-4ea6-930d-9345403f2b2e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.220025] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1524.220144] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3f1c9d2a-b4d6-45da-bafa-d07a17fd6549 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.226234] env[69992]: DEBUG oslo_vmware.api [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Waiting for the task: (returnval){ [ 1524.226234] env[69992]: value = "task-2898270" [ 1524.226234] env[69992]: _type = "Task" [ 1524.226234] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.239771] env[69992]: DEBUG oslo_vmware.api [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': task-2898270, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.590828] env[69992]: DEBUG nova.network.neutron [req-60d52743-4c9a-4a5c-b17e-285cce63f572 req-9157be1e-fe52-40d9-a83e-39e79df07bb5 service nova] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Updated VIF entry in instance network info cache for port ba5a70da-7de9-4267-a3cf-1cdbabc9aa22. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1524.591370] env[69992]: DEBUG nova.network.neutron [req-60d52743-4c9a-4a5c-b17e-285cce63f572 req-9157be1e-fe52-40d9-a83e-39e79df07bb5 service nova] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Updating instance_info_cache with network_info: [{"id": "ba5a70da-7de9-4267-a3cf-1cdbabc9aa22", "address": "fa:16:3e:4f:a5:1f", "network": {"id": "7b76ab15-e15a-4e22-ba38-bffeba7c4ff6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1461551189-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4da04b8933ad4d2ba4b1c193853f31b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba5a70da-7d", "ovs_interfaceid": "ba5a70da-7de9-4267-a3cf-1cdbabc9aa22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.739517] env[69992]: DEBUG oslo_vmware.api [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': task-2898270, 'name': PowerOffVM_Task, 'duration_secs': 0.407059} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.739773] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1524.739925] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1524.740390] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e9edb57a-65f9-40c2-ad24-4d1cd27acbd7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.834712] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1524.834968] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1524.835133] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Deleting the datastore file [datastore1] e7539d23-b4bb-48e8-89f4-ba98e6a12a01 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1524.835391] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-03f2cf53-8b95-46a1-b338-6837702ab1e8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.842731] env[69992]: DEBUG oslo_vmware.api [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Waiting for the task: (returnval){ [ 1524.842731] env[69992]: value = "task-2898272" [ 1524.842731] env[69992]: _type = "Task" [ 1524.842731] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.850267] env[69992]: DEBUG oslo_vmware.api [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': task-2898272, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.094031] env[69992]: DEBUG oslo_concurrency.lockutils [req-60d52743-4c9a-4a5c-b17e-285cce63f572 req-9157be1e-fe52-40d9-a83e-39e79df07bb5 service nova] Releasing lock "refresh_cache-6df2b739-02c0-40ac-b2a2-14587e3996bf" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1525.354237] env[69992]: DEBUG oslo_vmware.api [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Task: {'id': task-2898272, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.379283} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.354525] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1525.354783] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1525.355055] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1525.355273] env[69992]: INFO nova.compute.manager [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1525.355719] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
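The terminate path for e7539d23-b4bb-48e8-89f4-ba98e6a12a01 runs roughly in reverse: PowerOffVM, UnregisterVM, delete the instance's datastore directory, then deallocate the Neutron ports. A sketch of that ordering in the same style as above (names illustrative, not Nova's):

```python
def destroy_instance(steps):
    """Teardown order seen in the log; each value is a callable."""
    for name in ("power_off",             # PowerOffVM_Task
                 "unregister_vm",         # UnregisterVM
                 "delete_datastore_dir",  # FileManager.DeleteDatastoreFile_Task
                 "deallocate_network"):   # Neutron deallocate_for_instance()
        steps[name]()
```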
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1525.355854] env[69992]: DEBUG nova.compute.manager [-] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1525.355921] env[69992]: DEBUG nova.network.neutron [-] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1525.896498] env[69992]: DEBUG nova.compute.manager [req-b33b389a-5729-4d88-aafd-f7aae8f834b6 req-336e57d9-898e-4ea4-b6db-edf9093f082a service nova] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Received event network-vif-deleted-bd85f0c5-e04c-4955-b2f0-952380a45baa {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1525.896819] env[69992]: INFO nova.compute.manager [req-b33b389a-5729-4d88-aafd-f7aae8f834b6 req-336e57d9-898e-4ea4-b6db-edf9093f082a service nova] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Neutron deleted interface bd85f0c5-e04c-4955-b2f0-952380a45baa; detaching it from the instance and deleting it from the info cache [ 1525.897040] env[69992]: DEBUG nova.network.neutron [req-b33b389a-5729-4d88-aafd-f7aae8f834b6 req-336e57d9-898e-4ea4-b6db-edf9093f082a service nova] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1526.108375] env[69992]: DEBUG nova.network.neutron [-] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1526.252351] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96ffe704-3332-491b-af1c-1b11dbaa0d7f tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "7b549cd4-bfdc-45c5-9031-9b378ad7ee79" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1526.252633] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96ffe704-3332-491b-af1c-1b11dbaa0d7f tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "7b549cd4-bfdc-45c5-9031-9b378ad7ee79" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1526.252812] env[69992]: DEBUG nova.compute.manager [None req-96ffe704-3332-491b-af1c-1b11dbaa0d7f tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Going to confirm migration 8 {{(pid=69992) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1526.400068] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01c9a80b-68c6-4b2a-8fdd-c81b9644e266 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.410574] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024ae5f8-de54-42ed-9355-b3370ad9df7a {{(pid=69992) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.440494] env[69992]: DEBUG nova.compute.manager [req-b33b389a-5729-4d88-aafd-f7aae8f834b6 req-336e57d9-898e-4ea4-b6db-edf9093f082a service nova] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Detach interface failed, port_id=bd85f0c5-e04c-4955-b2f0-952380a45baa, reason: Instance e7539d23-b4bb-48e8-89f4-ba98e6a12a01 could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1526.611024] env[69992]: INFO nova.compute.manager [-] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Took 1.25 seconds to deallocate network for instance. [ 1526.819333] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96ffe704-3332-491b-af1c-1b11dbaa0d7f tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "refresh_cache-7b549cd4-bfdc-45c5-9031-9b378ad7ee79" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1526.819523] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96ffe704-3332-491b-af1c-1b11dbaa0d7f tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquired lock "refresh_cache-7b549cd4-bfdc-45c5-9031-9b378ad7ee79" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1526.819703] env[69992]: DEBUG nova.network.neutron [None req-96ffe704-3332-491b-af1c-1b11dbaa0d7f tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1526.819888] env[69992]: DEBUG nova.objects.instance [None req-96ffe704-3332-491b-af1c-1b11dbaa0d7f tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lazy-loading 'info_cache' on Instance uuid 7b549cd4-bfdc-45c5-9031-9b378ad7ee79 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1527.118207] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1527.118581] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1527.118686] env[69992]: DEBUG nova.objects.instance [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Lazy-loading 'resources' on Instance uuid e7539d23-b4bb-48e8-89f4-ba98e6a12a01 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1527.711572] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43857249-6ce3-467f-a9d7-25d24e31c67c 
{{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.719034] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4136f680-d19b-4418-9325-587ddec620ee {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.748768] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3dabaa6-c7dc-44ae-a433-b6242f3cc9a7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.755748] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cebe51a-6364-4caf-8b62-dfdc05c5c74a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.769805] env[69992]: DEBUG nova.compute.provider_tree [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1528.027088] env[69992]: DEBUG nova.network.neutron [None req-96ffe704-3332-491b-af1c-1b11dbaa0d7f tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Updating instance_info_cache with network_info: [{"id": "3319610f-82c5-4e8a-85bc-ec2d73b68ebe", "address": "fa:16:3e:ed:37:96", "network": {"id": "107a28a2-7647-4953-abac-1246b892511f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1382825212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ab89c6cf054418a4dd1a0e61b3a5e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46785c9c-8b22-487d-a854-b3e67c5ed1d7", "external-id": "nsx-vlan-transportzone-430", "segmentation_id": 430, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3319610f-82", "ovs_interfaceid": "3319610f-82c5-4e8a-85bc-ec2d73b68ebe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.273388] env[69992]: DEBUG nova.scheduler.client.report [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1528.529729] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96ffe704-3332-491b-af1c-1b11dbaa0d7f tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Releasing lock "refresh_cache-7b549cd4-bfdc-45c5-9031-9b378ad7ee79" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1528.530020] env[69992]: DEBUG nova.objects.instance [None req-96ffe704-3332-491b-af1c-1b11dbaa0d7f tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lazy-loading 'migration_context' on Instance uuid 7b549cd4-bfdc-45c5-9031-9b378ad7ee79 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1528.778496] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.660s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1528.799186] env[69992]: INFO nova.scheduler.client.report [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Deleted allocations for instance e7539d23-b4bb-48e8-89f4-ba98e6a12a01 [ 1529.032887] env[69992]: DEBUG nova.objects.base [None req-96ffe704-3332-491b-af1c-1b11dbaa0d7f tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Object Instance<7b549cd4-bfdc-45c5-9031-9b378ad7ee79> lazy-loaded attributes: info_cache,migration_context {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1529.033840] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f3617d-d9bf-4ed6-bcc4-60bbc5a51079 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.052896] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f059157b-eddc-407f-9da9-d4d521dec6de {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.058451] env[69992]: DEBUG oslo_vmware.api [None req-96ffe704-3332-491b-af1c-1b11dbaa0d7f tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1529.058451] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]524c7728-39a3-e5cb-ad19-7044fd0ddd66" [ 1529.058451] env[69992]: _type = "Task" [ 1529.058451] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.065723] env[69992]: DEBUG oslo_vmware.api [None req-96ffe704-3332-491b-af1c-1b11dbaa0d7f tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524c7728-39a3-e5cb-ad19-7044fd0ddd66, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.306997] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c18ec69e-2c44-4445-9b14-772fa8d7cc61 tempest-ServerMetadataTestJSON-2115374873 tempest-ServerMetadataTestJSON-2115374873-project-member] Lock "e7539d23-b4bb-48e8-89f4-ba98e6a12a01" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.603s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1529.568975] env[69992]: DEBUG oslo_vmware.api [None req-96ffe704-3332-491b-af1c-1b11dbaa0d7f tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524c7728-39a3-e5cb-ad19-7044fd0ddd66, 'name': SearchDatastore_Task, 'duration_secs': 0.008889} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.569232] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96ffe704-3332-491b-af1c-1b11dbaa0d7f tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1529.569479] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96ffe704-3332-491b-af1c-1b11dbaa0d7f tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1529.605934] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1530.179343] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e985b76b-323a-4de9-acf6-99555abc1c3b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.186907] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3652f74-e312-4c30-8f43-1f82fa739467 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.219166] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db3a28e-94a4-4ba8-8285-23095d47a656 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.226588] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af74735f-2936-431a-b1ea-5cd64d00580d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.239970] env[69992]: DEBUG nova.compute.provider_tree [None req-96ffe704-3332-491b-af1c-1b11dbaa0d7f tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Inventory has not changed in ProviderTree for 
provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1530.744024] env[69992]: DEBUG nova.scheduler.client.report [None req-96ffe704-3332-491b-af1c-1b11dbaa0d7f tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1531.755224] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96ffe704-3332-491b-af1c-1b11dbaa0d7f tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.185s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1532.320064] env[69992]: INFO nova.scheduler.client.report [None req-96ffe704-3332-491b-af1c-1b11dbaa0d7f tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Deleted allocation for migration 685133dc-8ed1-4552-b3aa-faf26d22ebb4 [ 1532.596318] env[69992]: INFO nova.compute.manager [None req-118dd373-642b-4b29-bf7f-fdb831e9c0eb tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Get console output [ 1532.596532] env[69992]: WARNING nova.virt.vmwareapi.driver [None req-118dd373-642b-4b29-bf7f-fdb831e9c0eb tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] The console log is missing. 
Check your VSPC configuration [ 1532.826072] env[69992]: DEBUG oslo_concurrency.lockutils [None req-96ffe704-3332-491b-af1c-1b11dbaa0d7f tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "7b549cd4-bfdc-45c5-9031-9b378ad7ee79" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.573s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1558.069054] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82da1dee-6b68-4f35-9385-3284b0593443 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "90facf1a-ae81-4259-bf75-94779267699c" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1558.069054] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82da1dee-6b68-4f35-9385-3284b0593443 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "90facf1a-ae81-4259-bf75-94779267699c" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1558.069054] env[69992]: INFO nova.compute.manager [None req-82da1dee-6b68-4f35-9385-3284b0593443 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Rebooting instance [ 1558.586020] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82da1dee-6b68-4f35-9385-3284b0593443 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1558.586215] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82da1dee-6b68-4f35-9385-3284b0593443 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1558.586404] env[69992]: DEBUG nova.network.neutron [None req-82da1dee-6b68-4f35-9385-3284b0593443 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1559.281053] env[69992]: DEBUG nova.network.neutron [None req-82da1dee-6b68-4f35-9385-3284b0593443 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Updating instance_info_cache with network_info: [{"id": "0ec50d92-4ea0-44af-b9fd-14443de36a12", "address": "fa:16:3e:1a:4d:23", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": 
"10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ec50d92-4e", "ovs_interfaceid": "0ec50d92-4ea0-44af-b9fd-14443de36a12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1559.504054] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "7b549cd4-bfdc-45c5-9031-9b378ad7ee79" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1559.504290] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "7b549cd4-bfdc-45c5-9031-9b378ad7ee79" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1559.504542] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "7b549cd4-bfdc-45c5-9031-9b378ad7ee79-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1559.504741] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "7b549cd4-bfdc-45c5-9031-9b378ad7ee79-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1559.504910] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "7b549cd4-bfdc-45c5-9031-9b378ad7ee79-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1559.508556] env[69992]: INFO nova.compute.manager [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Terminating instance [ 1559.783816] env[69992]: DEBUG oslo_concurrency.lockutils [None 
req-82da1dee-6b68-4f35-9385-3284b0593443 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1560.012496] env[69992]: DEBUG nova.compute.manager [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1560.012747] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1560.013044] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3091c08f-8e2e-4a39-b61d-daa2609aaab6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.021144] env[69992]: DEBUG oslo_vmware.api [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1560.021144] env[69992]: value = "task-2898273" [ 1560.021144] env[69992]: _type = "Task" [ 1560.021144] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.030896] env[69992]: DEBUG oslo_vmware.api [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898273, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.288483] env[69992]: DEBUG nova.compute.manager [None req-82da1dee-6b68-4f35-9385-3284b0593443 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1560.289353] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-693efb63-f7cd-4f6a-ba7b-ccfc6f68c5fb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.526819] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b7b607e0-de5e-412b-8f1c-e2ae86a01d33 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "6df2b739-02c0-40ac-b2a2-14587e3996bf" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1560.526819] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b7b607e0-de5e-412b-8f1c-e2ae86a01d33 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "6df2b739-02c0-40ac-b2a2-14587e3996bf" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1560.533572] env[69992]: DEBUG oslo_vmware.api [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898273, 'name': PowerOffVM_Task, 'duration_secs': 0.175079} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.533812] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1560.534015] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Volume detach. 
Driver type: vmdk {{(pid=69992) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1560.534215] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582150', 'volume_id': 'b76a65d3-3712-42c5-a9d4-c35d2046ba1f', 'name': 'volume-b76a65d3-3712-42c5-a9d4-c35d2046ba1f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '7b549cd4-bfdc-45c5-9031-9b378ad7ee79', 'attached_at': '2025-03-10T17:57:10.000000', 'detached_at': '', 'volume_id': 'b76a65d3-3712-42c5-a9d4-c35d2046ba1f', 'serial': 'b76a65d3-3712-42c5-a9d4-c35d2046ba1f'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1560.535441] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45210357-d154-46f6-aff4-efcc72cb86e6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.554138] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594f51a9-e405-4814-be44-dd1bb8fb697c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.560091] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5a6770-3f72-4b43-bdd8-2dcfed943aa5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.577399] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee8f9a5-fb65-4ab2-9c6e-27d7b435ce35 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.591448] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] The volume has not been displaced from its original location: [datastore1] volume-b76a65d3-3712-42c5-a9d4-c35d2046ba1f/volume-b76a65d3-3712-42c5-a9d4-c35d2046ba1f.vmdk. No consolidation needed. 
{{(pid=69992) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1560.596617] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Reconfiguring VM instance instance-00000079 to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1560.596873] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6a51654-a2ad-4f2a-80ed-1195ca008f78 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.614416] env[69992]: DEBUG oslo_vmware.api [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1560.614416] env[69992]: value = "task-2898274" [ 1560.614416] env[69992]: _type = "Task" [ 1560.614416] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.621662] env[69992]: DEBUG oslo_vmware.api [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898274, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.029611] env[69992]: DEBUG nova.compute.utils [None req-b7b607e0-de5e-412b-8f1c-e2ae86a01d33 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1561.123485] env[69992]: DEBUG oslo_vmware.api [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898274, 'name': ReconfigVM_Task, 'duration_secs': 0.160576} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.123779] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Reconfigured VM instance instance-00000079 to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1561.128219] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb02cb16-5273-4ae8-a821-6e11980864be {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.142969] env[69992]: DEBUG oslo_vmware.api [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1561.142969] env[69992]: value = "task-2898275" [ 1561.142969] env[69992]: _type = "Task" [ 1561.142969] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.150148] env[69992]: DEBUG oslo_vmware.api [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898275, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.304058] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-121797cd-aaa4-4f0c-8186-6a68d877a03e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.311312] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-82da1dee-6b68-4f35-9385-3284b0593443 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Doing hard reboot of VM {{(pid=69992) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1561.311537] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-ca5615a8-24a3-4e82-91f4-2a18d45a0fbf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.318487] env[69992]: DEBUG oslo_vmware.api [None req-82da1dee-6b68-4f35-9385-3284b0593443 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1561.318487] env[69992]: value = "task-2898276" [ 1561.318487] env[69992]: _type = "Task" [ 1561.318487] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.326989] env[69992]: DEBUG oslo_vmware.api [None req-82da1dee-6b68-4f35-9385-3284b0593443 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898276, 'name': ResetVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.533180] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b7b607e0-de5e-412b-8f1c-e2ae86a01d33 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "6df2b739-02c0-40ac-b2a2-14587e3996bf" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1561.653051] env[69992]: DEBUG oslo_vmware.api [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898275, 'name': ReconfigVM_Task, 'duration_secs': 0.130528} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.653358] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582150', 'volume_id': 'b76a65d3-3712-42c5-a9d4-c35d2046ba1f', 'name': 'volume-b76a65d3-3712-42c5-a9d4-c35d2046ba1f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '7b549cd4-bfdc-45c5-9031-9b378ad7ee79', 'attached_at': '2025-03-10T17:57:10.000000', 'detached_at': '', 'volume_id': 'b76a65d3-3712-42c5-a9d4-c35d2046ba1f', 'serial': 'b76a65d3-3712-42c5-a9d4-c35d2046ba1f'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1561.653670] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1561.654502] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe776a7d-7742-45f3-b7c3-8fd7b117b7c9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.660844] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1561.661079] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5b252c9a-3137-4045-a9c7-143528500b83 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.725254] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1561.725448] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1561.725625] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Deleting the datastore file [datastore1] 7b549cd4-bfdc-45c5-9031-9b378ad7ee79 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1561.725888] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cd5093c9-f4fc-4f67-87f9-64129e0fa853 {{(pid=69992) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.732249] env[69992]: DEBUG oslo_vmware.api [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1561.732249] env[69992]: value = "task-2898278" [ 1561.732249] env[69992]: _type = "Task" [ 1561.732249] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.739735] env[69992]: DEBUG oslo_vmware.api [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898278, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.827286] env[69992]: DEBUG oslo_vmware.api [None req-82da1dee-6b68-4f35-9385-3284b0593443 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898276, 'name': ResetVM_Task, 'duration_secs': 0.096908} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.827465] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-82da1dee-6b68-4f35-9385-3284b0593443 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Did hard reboot of VM {{(pid=69992) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1561.827663] env[69992]: DEBUG nova.compute.manager [None req-82da1dee-6b68-4f35-9385-3284b0593443 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1561.828443] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27db75f1-187a-4c9f-81c2-4217d8cecafc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.241436] env[69992]: DEBUG oslo_vmware.api [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898278, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085727} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.241688] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1562.241876] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1562.242064] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1562.242245] env[69992]: INFO nova.compute.manager [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Took 2.23 seconds to destroy the instance on the hypervisor. [ 1562.242479] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1562.242699] env[69992]: DEBUG nova.compute.manager [-] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1562.242801] env[69992]: DEBUG nova.network.neutron [-] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1562.342575] env[69992]: DEBUG oslo_concurrency.lockutils [None req-82da1dee-6b68-4f35-9385-3284b0593443 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "90facf1a-ae81-4259-bf75-94779267699c" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.273s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1562.588900] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b7b607e0-de5e-412b-8f1c-e2ae86a01d33 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "6df2b739-02c0-40ac-b2a2-14587e3996bf" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1562.589216] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b7b607e0-de5e-412b-8f1c-e2ae86a01d33 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "6df2b739-02c0-40ac-b2a2-14587e3996bf" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1562.589466] env[69992]: INFO nova.compute.manager [None req-b7b607e0-de5e-412b-8f1c-e2ae86a01d33 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Attaching volume 55a16181-a822-45b3-a00b-1e7cbd8a01b8 to /dev/sdb [ 1562.624808] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c67114af-2498-4d81-ba0b-f4071176c909 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.631968] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2401c0fe-7856-46b1-9add-ae77a2c9c259 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.647447] env[69992]: DEBUG nova.virt.block_device [None req-b7b607e0-de5e-412b-8f1c-e2ae86a01d33 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Updating existing volume attachment record: 51225449-e01a-4177-968a-83faa5cb5f3f {{(pid=69992) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1562.715811] env[69992]: DEBUG nova.compute.manager [req-c869acc8-ffff-4c5f-8b85-28ee134824b9 req-c55e658b-a423-4c2b-8325-97e7fdcb00c3 service nova] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Received event network-vif-deleted-3319610f-82c5-4e8a-85bc-ec2d73b68ebe {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 
1562.715811] env[69992]: INFO nova.compute.manager [req-c869acc8-ffff-4c5f-8b85-28ee134824b9 req-c55e658b-a423-4c2b-8325-97e7fdcb00c3 service nova] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Neutron deleted interface 3319610f-82c5-4e8a-85bc-ec2d73b68ebe; detaching it from the instance and deleting it from the info cache [ 1562.716193] env[69992]: DEBUG nova.network.neutron [req-c869acc8-ffff-4c5f-8b85-28ee134824b9 req-c55e658b-a423-4c2b-8325-97e7fdcb00c3 service nova] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1563.192041] env[69992]: DEBUG nova.network.neutron [-] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1563.218710] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-447c4c0c-1e5c-4974-8a79-65cc43af9e3c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.227752] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0786010c-f957-4711-8dd4-5c43170edffa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.255567] env[69992]: DEBUG nova.compute.manager [req-c869acc8-ffff-4c5f-8b85-28ee134824b9 req-c55e658b-a423-4c2b-8325-97e7fdcb00c3 service nova] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Detach interface failed, port_id=3319610f-82c5-4e8a-85bc-ec2d73b68ebe, reason: Instance 7b549cd4-bfdc-45c5-9031-9b378ad7ee79 could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1563.694444] env[69992]: INFO nova.compute.manager [-] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Took 1.45 seconds to deallocate network for instance. [ 1564.235305] env[69992]: INFO nova.compute.manager [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Took 0.54 seconds to detach 1 volumes for instance. 
[ 1564.238073] env[69992]: DEBUG nova.compute.manager [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Deleting volume: b76a65d3-3712-42c5-a9d4-c35d2046ba1f {{(pid=69992) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1564.785033] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1564.785393] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1564.785511] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1564.812492] env[69992]: INFO nova.scheduler.client.report [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Deleted allocations for instance 7b549cd4-bfdc-45c5-9031-9b378ad7ee79 [ 1565.320462] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1ce4bdc3-aea8-47f0-a134-eca1f6e23b45 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "7b549cd4-bfdc-45c5-9031-9b378ad7ee79" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.816s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1566.135216] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1566.135540] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1566.135697] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock 
"57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1566.135885] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1566.136092] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1566.138217] env[69992]: INFO nova.compute.manager [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Terminating instance [ 1566.642398] env[69992]: DEBUG nova.compute.manager [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1566.642584] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1566.643539] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-035da912-6c1b-455c-9fc3-6d442eeb757f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.651533] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1566.651776] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7bd7c0f4-3b56-459d-92c9-05742553b1a0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.657628] env[69992]: DEBUG oslo_vmware.api [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1566.657628] env[69992]: value = "task-2898282" [ 1566.657628] env[69992]: _type = "Task" [ 1566.657628] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.665486] env[69992]: DEBUG oslo_vmware.api [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898282, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.167655] env[69992]: DEBUG oslo_vmware.api [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898282, 'name': PowerOffVM_Task, 'duration_secs': 0.18238} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.168058] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1567.168058] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1567.168293] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-113d24f0-c7a1-4d6e-8490-1be523747da6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.191725] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7b607e0-de5e-412b-8f1c-e2ae86a01d33 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Volume attach. 
Driver type: vmdk {{(pid=69992) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1567.191949] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7b607e0-de5e-412b-8f1c-e2ae86a01d33 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582159', 'volume_id': '55a16181-a822-45b3-a00b-1e7cbd8a01b8', 'name': 'volume-55a16181-a822-45b3-a00b-1e7cbd8a01b8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6df2b739-02c0-40ac-b2a2-14587e3996bf', 'attached_at': '', 'detached_at': '', 'volume_id': '55a16181-a822-45b3-a00b-1e7cbd8a01b8', 'serial': '55a16181-a822-45b3-a00b-1e7cbd8a01b8'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1567.192714] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b467d5-cd51-4724-b066-3d1fc666b8ef {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.208226] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b398717-32c9-4a26-a95b-41feef4dc883 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.232443] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7b607e0-de5e-412b-8f1c-e2ae86a01d33 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Reconfiguring VM instance instance-0000007b to attach disk [datastore2] volume-55a16181-a822-45b3-a00b-1e7cbd8a01b8/volume-55a16181-a822-45b3-a00b-1e7cbd8a01b8.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1567.233734] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbdf9108-7080-46c9-b633-98eec81476c8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.246273] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1567.246465] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1567.246640] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Deleting the datastore file [datastore2] 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1567.246902] env[69992]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d001b202-ffe2-4f3b-9012-fed6cde7db6e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.252345] env[69992]: DEBUG oslo_vmware.api [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1567.252345] env[69992]: value = "task-2898284" [ 1567.252345] env[69992]: _type = "Task" [ 1567.252345] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.256174] env[69992]: DEBUG oslo_vmware.api [None req-b7b607e0-de5e-412b-8f1c-e2ae86a01d33 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1567.256174] env[69992]: value = "task-2898285" [ 1567.256174] env[69992]: _type = "Task" [ 1567.256174] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.262166] env[69992]: DEBUG oslo_vmware.api [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898284, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.266680] env[69992]: DEBUG oslo_vmware.api [None req-b7b607e0-de5e-412b-8f1c-e2ae86a01d33 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898285, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.763542] env[69992]: DEBUG oslo_vmware.api [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898284, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151886} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.764121] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1567.764313] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1567.764496] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1567.764680] env[69992]: INFO nova.compute.manager [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1567.764942] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1567.765147] env[69992]: DEBUG nova.compute.manager [-] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1567.765242] env[69992]: DEBUG nova.network.neutron [-] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1567.769342] env[69992]: DEBUG oslo_vmware.api [None req-b7b607e0-de5e-412b-8f1c-e2ae86a01d33 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898285, 'name': ReconfigVM_Task, 'duration_secs': 0.355008} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.769890] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7b607e0-de5e-412b-8f1c-e2ae86a01d33 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Reconfigured VM instance instance-0000007b to attach disk [datastore2] volume-55a16181-a822-45b3-a00b-1e7cbd8a01b8/volume-55a16181-a822-45b3-a00b-1e7cbd8a01b8.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1567.774485] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-152b0d53-a8c1-4f67-988b-d4c7a110513a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.790090] env[69992]: DEBUG oslo_vmware.api [None req-b7b607e0-de5e-412b-8f1c-e2ae86a01d33 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1567.790090] env[69992]: value = "task-2898286" [ 1567.790090] env[69992]: _type = "Task" [ 1567.790090] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.802300] env[69992]: DEBUG oslo_vmware.api [None req-b7b607e0-de5e-412b-8f1c-e2ae86a01d33 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898286, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.023332] env[69992]: DEBUG nova.compute.manager [req-3d22bf50-51ce-44b8-88ef-f63e0527abd6 req-dc3ca9a8-b051-4f05-9b43-63eb9f7bb47f service nova] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Received event network-vif-deleted-b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1568.023565] env[69992]: INFO nova.compute.manager [req-3d22bf50-51ce-44b8-88ef-f63e0527abd6 req-dc3ca9a8-b051-4f05-9b43-63eb9f7bb47f service nova] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Neutron deleted interface b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8; detaching it from the instance and deleting it from the info cache [ 1568.023771] env[69992]: DEBUG nova.network.neutron [req-3d22bf50-51ce-44b8-88ef-f63e0527abd6 req-dc3ca9a8-b051-4f05-9b43-63eb9f7bb47f service nova] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1568.299661] env[69992]: DEBUG oslo_vmware.api [None req-b7b607e0-de5e-412b-8f1c-e2ae86a01d33 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898286, 'name': ReconfigVM_Task, 'duration_secs': 0.182525} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.300043] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7b607e0-de5e-412b-8f1c-e2ae86a01d33 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582159', 'volume_id': '55a16181-a822-45b3-a00b-1e7cbd8a01b8', 'name': 'volume-55a16181-a822-45b3-a00b-1e7cbd8a01b8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6df2b739-02c0-40ac-b2a2-14587e3996bf', 'attached_at': '', 'detached_at': '', 'volume_id': '55a16181-a822-45b3-a00b-1e7cbd8a01b8', 'serial': '55a16181-a822-45b3-a00b-1e7cbd8a01b8'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1568.506284] env[69992]: DEBUG nova.network.neutron [-] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1568.526448] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a18e5fd3-3e40-4c89-bb22-af3fa0e3f8a7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.536169] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-893f2964-101a-4c49-9469-3bd530747576 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.564985] env[69992]: DEBUG nova.compute.manager [req-3d22bf50-51ce-44b8-88ef-f63e0527abd6 req-dc3ca9a8-b051-4f05-9b43-63eb9f7bb47f service nova] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Detach interface failed, port_id=b710419b-6fa9-4a79-b4cc-69ecc5cbb8a8, reason: Instance 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1569.009371] env[69992]: INFO nova.compute.manager [-] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Took 1.24 seconds to deallocate network for instance. 
[ 1569.335019] env[69992]: DEBUG nova.objects.instance [None req-b7b607e0-de5e-412b-8f1c-e2ae86a01d33 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lazy-loading 'flavor' on Instance uuid 6df2b739-02c0-40ac-b2a2-14587e3996bf {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1569.516409] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1569.516706] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1569.517015] env[69992]: DEBUG nova.objects.instance [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lazy-loading 'resources' on Instance uuid 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1569.840317] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b7b607e0-de5e-412b-8f1c-e2ae86a01d33 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "6df2b739-02c0-40ac-b2a2-14587e3996bf" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.251s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1570.079114] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5500f2aa-71c4-4cf1-ad4b-990878b7d19f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.087688] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d9551d-e75e-4bcb-95be-66d004d43a68 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.116227] env[69992]: DEBUG oslo_concurrency.lockutils [None req-42fa0cad-a670-4f2c-a887-1e3ba9d9d323 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "6df2b739-02c0-40ac-b2a2-14587e3996bf" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1570.116418] env[69992]: DEBUG oslo_concurrency.lockutils [None req-42fa0cad-a670-4f2c-a887-1e3ba9d9d323 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "6df2b739-02c0-40ac-b2a2-14587e3996bf" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1570.119040] env[69992]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6136368-5d2c-441b-9a19-906a3c33773a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.125678] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e54e68a0-0890-4009-ae9c-dd6a0d46a792 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.139384] env[69992]: DEBUG nova.compute.provider_tree [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1570.620029] env[69992]: INFO nova.compute.manager [None req-42fa0cad-a670-4f2c-a887-1e3ba9d9d323 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Detaching volume 55a16181-a822-45b3-a00b-1e7cbd8a01b8 [ 1570.642037] env[69992]: DEBUG nova.scheduler.client.report [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1570.652234] env[69992]: INFO nova.virt.block_device [None req-42fa0cad-a670-4f2c-a887-1e3ba9d9d323 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Attempting to driver detach volume 55a16181-a822-45b3-a00b-1e7cbd8a01b8 from mountpoint /dev/sdb [ 1570.652234] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-42fa0cad-a670-4f2c-a887-1e3ba9d9d323 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Volume detach. 
Driver type: vmdk {{(pid=69992) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1570.652234] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-42fa0cad-a670-4f2c-a887-1e3ba9d9d323 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582159', 'volume_id': '55a16181-a822-45b3-a00b-1e7cbd8a01b8', 'name': 'volume-55a16181-a822-45b3-a00b-1e7cbd8a01b8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6df2b739-02c0-40ac-b2a2-14587e3996bf', 'attached_at': '', 'detached_at': '', 'volume_id': '55a16181-a822-45b3-a00b-1e7cbd8a01b8', 'serial': '55a16181-a822-45b3-a00b-1e7cbd8a01b8'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1570.652985] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9776cde9-9b56-4037-a7d8-75c23443f4c1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.675486] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e054cf8-e81c-48d4-893a-98c1eddf39c9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.682071] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb8d66a-e467-4e2d-b140-2a05fc34e852 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.703813] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1895a1c5-6cee-4e98-8fd7-c0e384a98b4d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.718067] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-42fa0cad-a670-4f2c-a887-1e3ba9d9d323 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] The volume has not been displaced from its original location: [datastore2] volume-55a16181-a822-45b3-a00b-1e7cbd8a01b8/volume-55a16181-a822-45b3-a00b-1e7cbd8a01b8.vmdk. No consolidation needed. 
{{(pid=69992) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1570.723230] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-42fa0cad-a670-4f2c-a887-1e3ba9d9d323 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Reconfiguring VM instance instance-0000007b to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1570.723696] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-afdcc73f-ec64-455e-a0d3-91419467c998 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.740698] env[69992]: DEBUG oslo_vmware.api [None req-42fa0cad-a670-4f2c-a887-1e3ba9d9d323 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1570.740698] env[69992]: value = "task-2898287" [ 1570.740698] env[69992]: _type = "Task" [ 1570.740698] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.747962] env[69992]: DEBUG oslo_vmware.api [None req-42fa0cad-a670-4f2c-a887-1e3ba9d9d323 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898287, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.146585] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.630s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1571.167827] env[69992]: INFO nova.scheduler.client.report [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Deleted allocations for instance 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc [ 1571.254236] env[69992]: DEBUG oslo_vmware.api [None req-42fa0cad-a670-4f2c-a887-1e3ba9d9d323 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898287, 'name': ReconfigVM_Task, 'duration_secs': 0.216496} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.254605] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-42fa0cad-a670-4f2c-a887-1e3ba9d9d323 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Reconfigured VM instance instance-0000007b to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1571.262012] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1cf9ce7d-9373-4053-b107-0840c43c88d7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.283075] env[69992]: DEBUG oslo_vmware.api [None req-42fa0cad-a670-4f2c-a887-1e3ba9d9d323 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1571.283075] env[69992]: value = "task-2898288" [ 1571.283075] env[69992]: _type = "Task" [ 1571.283075] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.290378] env[69992]: DEBUG oslo_vmware.api [None req-42fa0cad-a670-4f2c-a887-1e3ba9d9d323 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898288, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.675346] env[69992]: DEBUG oslo_concurrency.lockutils [None req-c54f1826-2b42-4a4f-8977-3e89b98edcd1 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.540s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1571.792504] env[69992]: DEBUG oslo_vmware.api [None req-42fa0cad-a670-4f2c-a887-1e3ba9d9d323 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898288, 'name': ReconfigVM_Task, 'duration_secs': 0.132493} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.792800] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-42fa0cad-a670-4f2c-a887-1e3ba9d9d323 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582159', 'volume_id': '55a16181-a822-45b3-a00b-1e7cbd8a01b8', 'name': 'volume-55a16181-a822-45b3-a00b-1e7cbd8a01b8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6df2b739-02c0-40ac-b2a2-14587e3996bf', 'attached_at': '', 'detached_at': '', 'volume_id': '55a16181-a822-45b3-a00b-1e7cbd8a01b8', 'serial': '55a16181-a822-45b3-a00b-1e7cbd8a01b8'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1572.334062] env[69992]: DEBUG nova.objects.instance [None req-42fa0cad-a670-4f2c-a887-1e3ba9d9d323 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lazy-loading 'flavor' on Instance uuid 6df2b739-02c0-40ac-b2a2-14587e3996bf {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1572.609788] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1573.340503] env[69992]: DEBUG oslo_concurrency.lockutils [None req-42fa0cad-a670-4f2c-a887-1e3ba9d9d323 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "6df2b739-02c0-40ac-b2a2-14587e3996bf" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.224s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1573.610018] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1574.336766] env[69992]: DEBUG oslo_concurrency.lockutils [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "6df2b739-02c0-40ac-b2a2-14587e3996bf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1574.337085] env[69992]: DEBUG oslo_concurrency.lockutils [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "6df2b739-02c0-40ac-b2a2-14587e3996bf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1574.337312] env[69992]: DEBUG oslo_concurrency.lockutils [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock 
"6df2b739-02c0-40ac-b2a2-14587e3996bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1574.337499] env[69992]: DEBUG oslo_concurrency.lockutils [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "6df2b739-02c0-40ac-b2a2-14587e3996bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1574.337670] env[69992]: DEBUG oslo_concurrency.lockutils [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "6df2b739-02c0-40ac-b2a2-14587e3996bf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1574.339770] env[69992]: INFO nova.compute.manager [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Terminating instance [ 1574.605821] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1574.608792] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1574.843278] env[69992]: DEBUG nova.compute.manager [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1574.843469] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1574.844448] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98ce440-4fea-44a8-a7ab-0f4ff4aca863 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.852246] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1574.852465] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f77c0612-323b-49ac-92c5-7bd619151648 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.858550] env[69992]: DEBUG oslo_vmware.api [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1574.858550] env[69992]: value = "task-2898290" [ 1574.858550] env[69992]: _type = "Task" [ 1574.858550] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.866268] env[69992]: DEBUG oslo_vmware.api [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898290, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.369117] env[69992]: DEBUG oslo_vmware.api [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898290, 'name': PowerOffVM_Task, 'duration_secs': 0.201409} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.369458] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1575.369652] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1575.370191] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7a1eb1d0-b8b7-4091-a709-4623deb296c0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.437736] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1575.437985] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1575.438229] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Deleting the datastore file [datastore1] 6df2b739-02c0-40ac-b2a2-14587e3996bf {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1575.438503] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b4d2c141-eb64-4c86-b830-26bf7b909d8d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.444659] env[69992]: DEBUG oslo_vmware.api [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for the task: (returnval){ [ 1575.444659] env[69992]: value = "task-2898292" [ 1575.444659] env[69992]: _type = "Task" [ 1575.444659] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.452128] env[69992]: DEBUG oslo_vmware.api [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898292, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.609919] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1575.610332] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1575.610573] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69992) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1575.610831] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1575.829970] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1575.830205] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1575.830420] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1575.830610] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1575.830779] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1575.833096] env[69992]: INFO nova.compute.manager [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Terminating instance [ 1575.954989] env[69992]: DEBUG oslo_vmware.api [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Task: {'id': task-2898292, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133809} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.955275] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1575.955465] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1575.955651] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1575.955827] env[69992]: INFO nova.compute.manager [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1575.956087] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1575.956285] env[69992]: DEBUG nova.compute.manager [-] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1575.956397] env[69992]: DEBUG nova.network.neutron [-] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1576.114952] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1576.117024] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.002s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1576.117024] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1576.117024] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69992) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1576.118381] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2443cb1b-faa6-4a8f-932b-90aba790a739 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.129991] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-661b1309-09d8-4c3d-99cc-e50d28c9b781 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.153336] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f42c800-a54e-41b0-9d93-12c98a85149d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.162644] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffce7e92-adaf-4ef0-93f3-a95073d3d55d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.210148] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180206MB free_disk=161GB free_vcpus=48 pci_devices=None {{(pid=69992) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1576.210427] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1576.210775] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1576.336782] env[69992]: DEBUG nova.compute.manager [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1576.337015] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1576.337954] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf98d7c-b5a0-4c02-9cbf-599d7ea13ff1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.347711] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1576.347957] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-09a22cdb-9aea-4db3-9f69-42347e0f475a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.356090] env[69992]: DEBUG oslo_vmware.api [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1576.356090] env[69992]: value = "task-2898293" [ 1576.356090] env[69992]: _type = "Task" [ 1576.356090] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.364598] env[69992]: DEBUG oslo_vmware.api [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898293, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.410331] env[69992]: DEBUG nova.compute.manager [req-33b09bb8-4112-42a7-a02d-d130aab3653a req-9308cfd7-7ea6-46e0-941c-cd7a708fb638 service nova] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Received event network-vif-deleted-ba5a70da-7de9-4267-a3cf-1cdbabc9aa22 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1576.410331] env[69992]: INFO nova.compute.manager [req-33b09bb8-4112-42a7-a02d-d130aab3653a req-9308cfd7-7ea6-46e0-941c-cd7a708fb638 service nova] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Neutron deleted interface ba5a70da-7de9-4267-a3cf-1cdbabc9aa22; detaching it from the instance and deleting it from the info cache [ 1576.410331] env[69992]: DEBUG nova.network.neutron [req-33b09bb8-4112-42a7-a02d-d130aab3653a req-9308cfd7-7ea6-46e0-941c-cd7a708fb638 service nova] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.866040] env[69992]: DEBUG oslo_vmware.api [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898293, 'name': PowerOffVM_Task, 'duration_secs': 0.20055} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.866363] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1576.866672] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1576.867019] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e736028e-5c8c-41e7-8686-6ff136dd3621 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.886012] env[69992]: DEBUG nova.network.neutron [-] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.913081] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1afb68dd-2713-492a-b58a-339b96d0c3e5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.924166] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43b125a-4c74-49db-a270-f8afc4b1fcf6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.950620] env[69992]: DEBUG nova.compute.manager [req-33b09bb8-4112-42a7-a02d-d130aab3653a req-9308cfd7-7ea6-46e0-941c-cd7a708fb638 service nova] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Detach interface failed, 
port_id=ba5a70da-7de9-4267-a3cf-1cdbabc9aa22, reason: Instance 6df2b739-02c0-40ac-b2a2-14587e3996bf could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1576.970624] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1576.970624] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1576.970624] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Deleting the datastore file [datastore2] 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1576.970840] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-375fa708-bbfb-4327-83c2-e74844f132ec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.976824] env[69992]: DEBUG oslo_vmware.api [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for the task: (returnval){ [ 1576.976824] env[69992]: value = "task-2898295" [ 1576.976824] env[69992]: _type = "Task" [ 1576.976824] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.984354] env[69992]: DEBUG oslo_vmware.api [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898295, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.237059] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1577.237357] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 90facf1a-ae81-4259-bf75-94779267699c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1577.237525] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 6df2b739-02c0-40ac-b2a2-14587e3996bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1577.237740] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1577.237909] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1577.285894] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a788840b-06fe-4559-b5f3-2049baee34d4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.293553] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8a2655-80c6-4424-9fbd-1c3a9d1cf48d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.323681] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9433fb47-257d-4a45-abe0-c5aadada0a77 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.331117] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac0ad436-2659-43db-81ad-af6a7b184bf7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.345549] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1577.389081] env[69992]: INFO nova.compute.manager [-] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Took 1.43 seconds to deallocate network for instance. [ 1577.486806] env[69992]: DEBUG oslo_vmware.api [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Task: {'id': task-2898295, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143922} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.487115] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1577.487318] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1577.487496] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1577.487669] env[69992]: INFO nova.compute.manager [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1577.487901] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1577.488740] env[69992]: DEBUG nova.compute.manager [-] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1577.488740] env[69992]: DEBUG nova.network.neutron [-] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1577.849110] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1577.895821] env[69992]: DEBUG oslo_concurrency.lockutils [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1578.244409] env[69992]: DEBUG nova.network.neutron [-] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1578.353962] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69992) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1578.354160] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.143s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1578.354422] env[69992]: DEBUG oslo_concurrency.lockutils [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.459s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1578.354645] env[69992]: DEBUG nova.objects.instance [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lazy-loading 'resources' on Instance uuid 6df2b739-02c0-40ac-b2a2-14587e3996bf {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1578.437249] env[69992]: DEBUG nova.compute.manager [req-5f0aa9b2-a919-4648-a259-72fed23bbe24 req-f8cda776-0576-40b9-98d4-7930149a56f0 service nova] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Received 
event network-vif-deleted-ebc337ca-1f7f-449a-85a1-1af599dd4a19 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1578.747187] env[69992]: INFO nova.compute.manager [-] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Took 1.26 seconds to deallocate network for instance. [ 1578.906325] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e5a025-1324-4749-b7b5-cc97cacf1c84 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.915619] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-774271a6-9437-425c-b8f0-a9694b4e94c1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.948282] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f319605-1210-489a-821e-be240e50bc54 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.956240] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51c05aef-8c6c-491c-aaa4-417c6eba2852 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.969956] env[69992]: DEBUG nova.compute.provider_tree [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1579.253591] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1579.473198] env[69992]: DEBUG nova.scheduler.client.report [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1579.978561] env[69992]: DEBUG oslo_concurrency.lockutils [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.624s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1579.980948] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 
tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.727s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1579.981195] env[69992]: DEBUG nova.objects.instance [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lazy-loading 'resources' on Instance uuid 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1580.000157] env[69992]: INFO nova.scheduler.client.report [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Deleted allocations for instance 6df2b739-02c0-40ac-b2a2-14587e3996bf [ 1580.508045] env[69992]: DEBUG oslo_concurrency.lockutils [None req-62b858cb-63a6-40a2-bd45-6c8b9705a868 tempest-AttachVolumeNegativeTest-821716976 tempest-AttachVolumeNegativeTest-821716976-project-member] Lock "6df2b739-02c0-40ac-b2a2-14587e3996bf" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.171s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1580.523153] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d1dc5e7-f96c-4b98-91b8-f11e9fbed7ca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.532085] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532c4aa7-6006-4485-9f0e-c013951346ed {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.560938] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17565533-27cb-417d-92b5-7fdd6d7e5917 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.568795] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-171e3b07-68c4-41b4-bf3c-32e3c4dd330e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.581558] env[69992]: DEBUG nova.compute.provider_tree [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1581.084837] env[69992]: DEBUG nova.scheduler.client.report [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1581.589814] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.609s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1581.611308] env[69992]: INFO nova.scheduler.client.report [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Deleted allocations for instance 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb [ 1582.118793] env[69992]: DEBUG oslo_concurrency.lockutils [None req-3dad3757-06db-4243-9007-9131c4f4a022 tempest-ServerActionsTestOtherA-414793530 tempest-ServerActionsTestOtherA-414793530-project-member] Lock "88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.288s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1582.355405] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1594.048671] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "426efc79-924b-41bf-a890-2d27b4e862ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1594.048989] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "426efc79-924b-41bf-a890-2d27b4e862ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1594.551833] env[69992]: DEBUG nova.compute.manager [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1595.070275] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1595.070548] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1595.071972] env[69992]: INFO nova.compute.claims [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1596.115570] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580f5030-d5fe-45ed-bdf8-f264898a9582 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.123012] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38bc4f49-8bfc-49ce-a651-d8f685e930fd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.154224] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45dd7f8d-aacd-4a89-b48c-667a232a091e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.161122] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59635356-d0b1-48d1-9598-50fa77d48481 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.173531] env[69992]: DEBUG nova.compute.provider_tree [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1596.693653] env[69992]: ERROR nova.scheduler.client.report [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [req-65a9e377-8252-47b3-b172-ff3bbb6fa8f1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-65a9e377-8252-47b3-b172-ff3bbb6fa8f1"}]} [ 1596.708646] env[69992]: DEBUG nova.scheduler.client.report [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Refreshing inventories for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1596.719660] env[69992]: DEBUG nova.scheduler.client.report [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Updating ProviderTree inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1596.719897] env[69992]: DEBUG nova.compute.provider_tree [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1596.729815] env[69992]: DEBUG nova.scheduler.client.report [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Refreshing aggregate associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, aggregates: None {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1596.745420] env[69992]: DEBUG nova.scheduler.client.report [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Refreshing trait associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1596.777054] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c01020-504e-4823-93a8-46336e55baa1 {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.784362] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1590a4ab-0954-41aa-b50f-ae7bde825978 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.814413] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a122c83b-acf4-4588-9fa1-f301324e3444 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.821075] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3104e1eb-02ad-4171-8c91-77672d8ea45a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.833528] env[69992]: DEBUG nova.compute.provider_tree [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1597.364282] env[69992]: DEBUG nova.scheduler.client.report [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 180 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1597.364609] env[69992]: DEBUG nova.compute.provider_tree [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 180 to 181 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1597.364708] env[69992]: DEBUG nova.compute.provider_tree [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1597.868972] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.798s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1597.869539] env[69992]: DEBUG nova.compute.manager [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1598.375382] env[69992]: DEBUG nova.compute.utils [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1598.376845] env[69992]: DEBUG nova.compute.manager [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1598.377029] env[69992]: DEBUG nova.network.neutron [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1598.423525] env[69992]: DEBUG nova.policy [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '99bd7545f7d04aa28e625ce6c5491bb6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '122bc9ffa8f54a34af6047517fab0a9a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1598.706473] env[69992]: DEBUG nova.network.neutron [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Successfully created port: c0418a66-6f0b-4419-985a-c6003fd83fe0 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1598.880847] env[69992]: DEBUG nova.compute.manager [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1599.891423] env[69992]: DEBUG nova.compute.manager [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1599.919445] env[69992]: DEBUG nova.virt.hardware [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1599.919536] env[69992]: DEBUG nova.virt.hardware [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1599.919700] env[69992]: DEBUG nova.virt.hardware [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1599.919928] env[69992]: DEBUG nova.virt.hardware [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1599.920098] env[69992]: DEBUG nova.virt.hardware [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1599.920253] env[69992]: DEBUG nova.virt.hardware [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1599.920468] env[69992]: DEBUG nova.virt.hardware [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
1599.920629] env[69992]: DEBUG nova.virt.hardware [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1599.920794] env[69992]: DEBUG nova.virt.hardware [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1599.920958] env[69992]: DEBUG nova.virt.hardware [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1599.921182] env[69992]: DEBUG nova.virt.hardware [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1599.922056] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12cbc1b2-937e-4729-9abc-77d9327724c6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.929892] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679e874f-7736-49cd-8333-35b3c028172e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.051584] env[69992]: DEBUG nova.compute.manager [req-3ff5f4d8-f9ee-4f2c-a330-eb2469acf0e0 req-b48ef583-ed3b-4b00-af74-13d0aa1a0913 service nova] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Received event network-vif-plugged-c0418a66-6f0b-4419-985a-c6003fd83fe0 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1600.051779] env[69992]: DEBUG oslo_concurrency.lockutils [req-3ff5f4d8-f9ee-4f2c-a330-eb2469acf0e0 req-b48ef583-ed3b-4b00-af74-13d0aa1a0913 service nova] Acquiring lock "426efc79-924b-41bf-a890-2d27b4e862ef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1600.052016] env[69992]: DEBUG oslo_concurrency.lockutils [req-3ff5f4d8-f9ee-4f2c-a330-eb2469acf0e0 req-b48ef583-ed3b-4b00-af74-13d0aa1a0913 service nova] Lock "426efc79-924b-41bf-a890-2d27b4e862ef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1600.052192] env[69992]: DEBUG oslo_concurrency.lockutils [req-3ff5f4d8-f9ee-4f2c-a330-eb2469acf0e0 req-b48ef583-ed3b-4b00-af74-13d0aa1a0913 service nova] Lock "426efc79-924b-41bf-a890-2d27b4e862ef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1600.052407] env[69992]: DEBUG nova.compute.manager 
[req-3ff5f4d8-f9ee-4f2c-a330-eb2469acf0e0 req-b48ef583-ed3b-4b00-af74-13d0aa1a0913 service nova] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] No waiting events found dispatching network-vif-plugged-c0418a66-6f0b-4419-985a-c6003fd83fe0 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1600.052522] env[69992]: WARNING nova.compute.manager [req-3ff5f4d8-f9ee-4f2c-a330-eb2469acf0e0 req-b48ef583-ed3b-4b00-af74-13d0aa1a0913 service nova] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Received unexpected event network-vif-plugged-c0418a66-6f0b-4419-985a-c6003fd83fe0 for instance with vm_state building and task_state spawning. [ 1600.137011] env[69992]: DEBUG nova.network.neutron [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Successfully updated port: c0418a66-6f0b-4419-985a-c6003fd83fe0 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1600.640307] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "refresh_cache-426efc79-924b-41bf-a890-2d27b4e862ef" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1600.640307] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "refresh_cache-426efc79-924b-41bf-a890-2d27b4e862ef" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1600.640420] env[69992]: DEBUG nova.network.neutron [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1601.171203] env[69992]: DEBUG nova.network.neutron [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1601.293853] env[69992]: DEBUG nova.network.neutron [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Updating instance_info_cache with network_info: [{"id": "c0418a66-6f0b-4419-985a-c6003fd83fe0", "address": "fa:16:3e:08:44:dc", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0418a66-6f", "ovs_interfaceid": "c0418a66-6f0b-4419-985a-c6003fd83fe0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1601.796919] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "refresh_cache-426efc79-924b-41bf-a890-2d27b4e862ef" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1601.797285] env[69992]: DEBUG nova.compute.manager [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Instance network_info: |[{"id": "c0418a66-6f0b-4419-985a-c6003fd83fe0", "address": "fa:16:3e:08:44:dc", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0418a66-6f", "ovs_interfaceid": "c0418a66-6f0b-4419-985a-c6003fd83fe0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1601.797735] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:44:dc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e1049e8-c06b-4c93-a9e1-2cbb530f3f95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c0418a66-6f0b-4419-985a-c6003fd83fe0', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1601.805109] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1601.805320] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1601.805543] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4a0f430-301d-48f0-8625-a5dbda39cc42 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.826151] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1601.826151] env[69992]: value = "task-2898299" [ 1601.826151] env[69992]: _type = "Task" [ 1601.826151] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.833489] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898299, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.076194] env[69992]: DEBUG nova.compute.manager [req-c2cff356-0091-4c19-b8d0-8add4045ffcb req-e4a69bc5-4a92-48ac-9c4a-39b025e32365 service nova] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Received event network-changed-c0418a66-6f0b-4419-985a-c6003fd83fe0 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1602.076403] env[69992]: DEBUG nova.compute.manager [req-c2cff356-0091-4c19-b8d0-8add4045ffcb req-e4a69bc5-4a92-48ac-9c4a-39b025e32365 service nova] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Refreshing instance network info cache due to event network-changed-c0418a66-6f0b-4419-985a-c6003fd83fe0. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1602.076623] env[69992]: DEBUG oslo_concurrency.lockutils [req-c2cff356-0091-4c19-b8d0-8add4045ffcb req-e4a69bc5-4a92-48ac-9c4a-39b025e32365 service nova] Acquiring lock "refresh_cache-426efc79-924b-41bf-a890-2d27b4e862ef" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1602.076767] env[69992]: DEBUG oslo_concurrency.lockutils [req-c2cff356-0091-4c19-b8d0-8add4045ffcb req-e4a69bc5-4a92-48ac-9c4a-39b025e32365 service nova] Acquired lock "refresh_cache-426efc79-924b-41bf-a890-2d27b4e862ef" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1602.076951] env[69992]: DEBUG nova.network.neutron [req-c2cff356-0091-4c19-b8d0-8add4045ffcb req-e4a69bc5-4a92-48ac-9c4a-39b025e32365 service nova] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Refreshing network info cache for port c0418a66-6f0b-4419-985a-c6003fd83fe0 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1602.336064] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898299, 'name': CreateVM_Task, 'duration_secs': 0.306118} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.336428] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1602.336866] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1602.337046] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1602.337366] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1602.337614] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6338cd3a-b921-46a7-9424-e63568068b7a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.342091] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1602.342091] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52323b62-1ca2-5523-66eb-bd32c565f0d2" [ 1602.342091] env[69992]: _type = "Task" [ 1602.342091] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.349593] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52323b62-1ca2-5523-66eb-bd32c565f0d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.746652] env[69992]: DEBUG nova.network.neutron [req-c2cff356-0091-4c19-b8d0-8add4045ffcb req-e4a69bc5-4a92-48ac-9c4a-39b025e32365 service nova] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Updated VIF entry in instance network info cache for port c0418a66-6f0b-4419-985a-c6003fd83fe0. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1602.746998] env[69992]: DEBUG nova.network.neutron [req-c2cff356-0091-4c19-b8d0-8add4045ffcb req-e4a69bc5-4a92-48ac-9c4a-39b025e32365 service nova] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Updating instance_info_cache with network_info: [{"id": "c0418a66-6f0b-4419-985a-c6003fd83fe0", "address": "fa:16:3e:08:44:dc", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0418a66-6f", "ovs_interfaceid": "c0418a66-6f0b-4419-985a-c6003fd83fe0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1602.853200] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52323b62-1ca2-5523-66eb-bd32c565f0d2, 'name': SearchDatastore_Task, 'duration_secs': 0.010747} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.853499] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1602.853756] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1602.853972] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1602.854137] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1602.854379] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1602.854642] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-78f55e78-0abf-4400-9c98-61e65cd8a9fd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.862667] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1602.862809] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1602.863489] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-924c6354-7af3-42b8-84b0-b7d42becccb7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.868095] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1602.868095] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]525356da-e70f-8256-7d00-72f03e5255cd" [ 1602.868095] env[69992]: _type = "Task" [ 1602.868095] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.875021] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525356da-e70f-8256-7d00-72f03e5255cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.249620] env[69992]: DEBUG oslo_concurrency.lockutils [req-c2cff356-0091-4c19-b8d0-8add4045ffcb req-e4a69bc5-4a92-48ac-9c4a-39b025e32365 service nova] Releasing lock "refresh_cache-426efc79-924b-41bf-a890-2d27b4e862ef" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1603.378011] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525356da-e70f-8256-7d00-72f03e5255cd, 'name': SearchDatastore_Task, 'duration_secs': 0.008485} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.378777] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e5c1551-6d08-4d0b-ab68-636d7af4c41e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.383358] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1603.383358] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]525b6783-fe73-97e9-83f2-89eac87ac4d5" [ 1603.383358] env[69992]: _type = "Task" [ 1603.383358] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.390158] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525b6783-fe73-97e9-83f2-89eac87ac4d5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.895138] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525b6783-fe73-97e9-83f2-89eac87ac4d5, 'name': SearchDatastore_Task, 'duration_secs': 0.009722} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.895394] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1603.895673] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 426efc79-924b-41bf-a890-2d27b4e862ef/426efc79-924b-41bf-a890-2d27b4e862ef.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1603.895925] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c30822d-3a23-460d-b318-b1e89277dd18 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.902334] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1603.902334] env[69992]: value = "task-2898300" [ 1603.902334] env[69992]: _type = "Task" [ 1603.902334] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.909527] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898300, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.412597] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898300, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.441819} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.413027] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 426efc79-924b-41bf-a890-2d27b4e862ef/426efc79-924b-41bf-a890-2d27b4e862ef.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1604.413197] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1604.413460] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-98473d94-b9de-4429-a7f7-299d56701fe9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.419738] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1604.419738] env[69992]: value = "task-2898301" [ 1604.419738] env[69992]: _type = "Task" [ 1604.419738] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.427514] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898301, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.929274] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898301, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.05832} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.929529] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1604.930352] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83f044c-316f-4fa1-913f-a51af10b69ca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.951829] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] 426efc79-924b-41bf-a890-2d27b4e862ef/426efc79-924b-41bf-a890-2d27b4e862ef.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1604.952027] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c924a7b-e94a-4676-aa9b-9e6ee7f0d7ca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.971437] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1604.971437] env[69992]: value = "task-2898302" [ 1604.971437] env[69992]: _type = "Task" [ 1604.971437] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.978887] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898302, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.481376] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898302, 'name': ReconfigVM_Task, 'duration_secs': 0.264763} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.481751] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Reconfigured VM instance instance-0000007c to attach disk [datastore2] 426efc79-924b-41bf-a890-2d27b4e862ef/426efc79-924b-41bf-a890-2d27b4e862ef.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1605.482406] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e1ca4e5-6f3f-4c29-854b-46fe3ad2fd42 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.489155] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1605.489155] env[69992]: value = "task-2898303" [ 1605.489155] env[69992]: _type = "Task" [ 1605.489155] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.496292] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898303, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.998896] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898303, 'name': Rename_Task, 'duration_secs': 0.312722} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.999184] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1605.999401] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85ffb154-a853-47f3-877a-b3183d96b104 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.005977] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1606.005977] env[69992]: value = "task-2898304" [ 1606.005977] env[69992]: _type = "Task" [ 1606.005977] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.012996] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898304, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.515973] env[69992]: DEBUG oslo_vmware.api [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898304, 'name': PowerOnVM_Task, 'duration_secs': 0.474172} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.516258] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1606.516460] env[69992]: INFO nova.compute.manager [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Took 6.62 seconds to spawn the instance on the hypervisor. [ 1606.516635] env[69992]: DEBUG nova.compute.manager [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1606.517428] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc690e7c-74c0-4443-86e9-62af7a4a2e5f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.034636] env[69992]: INFO nova.compute.manager [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Took 11.98 seconds to build instance. [ 1607.536829] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f5b91407-5564-4aed-9d3a-ff24bb634cf4 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "426efc79-924b-41bf-a890-2d27b4e862ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.488s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1608.077247] env[69992]: DEBUG nova.compute.manager [req-f3fbbf6a-477d-448e-97f4-db04cf6b707b req-fffbcdb6-0b42-48cc-accc-be64b581e624 service nova] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Received event network-changed-c0418a66-6f0b-4419-985a-c6003fd83fe0 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1608.077476] env[69992]: DEBUG nova.compute.manager [req-f3fbbf6a-477d-448e-97f4-db04cf6b707b req-fffbcdb6-0b42-48cc-accc-be64b581e624 service nova] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Refreshing instance network info cache due to event network-changed-c0418a66-6f0b-4419-985a-c6003fd83fe0. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1608.077632] env[69992]: DEBUG oslo_concurrency.lockutils [req-f3fbbf6a-477d-448e-97f4-db04cf6b707b req-fffbcdb6-0b42-48cc-accc-be64b581e624 service nova] Acquiring lock "refresh_cache-426efc79-924b-41bf-a890-2d27b4e862ef" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.077774] env[69992]: DEBUG oslo_concurrency.lockutils [req-f3fbbf6a-477d-448e-97f4-db04cf6b707b req-fffbcdb6-0b42-48cc-accc-be64b581e624 service nova] Acquired lock "refresh_cache-426efc79-924b-41bf-a890-2d27b4e862ef" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1608.077964] env[69992]: DEBUG nova.network.neutron [req-f3fbbf6a-477d-448e-97f4-db04cf6b707b req-fffbcdb6-0b42-48cc-accc-be64b581e624 service nova] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Refreshing network info cache for port c0418a66-6f0b-4419-985a-c6003fd83fe0 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1608.781688] env[69992]: DEBUG nova.network.neutron [req-f3fbbf6a-477d-448e-97f4-db04cf6b707b req-fffbcdb6-0b42-48cc-accc-be64b581e624 service nova] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Updated VIF entry in instance network info cache for port c0418a66-6f0b-4419-985a-c6003fd83fe0. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1608.782055] env[69992]: DEBUG nova.network.neutron [req-f3fbbf6a-477d-448e-97f4-db04cf6b707b req-fffbcdb6-0b42-48cc-accc-be64b581e624 service nova] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Updating instance_info_cache with network_info: [{"id": "c0418a66-6f0b-4419-985a-c6003fd83fe0", "address": "fa:16:3e:08:44:dc", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0418a66-6f", "ovs_interfaceid": "c0418a66-6f0b-4419-985a-c6003fd83fe0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1609.285336] env[69992]: DEBUG oslo_concurrency.lockutils [req-f3fbbf6a-477d-448e-97f4-db04cf6b707b req-fffbcdb6-0b42-48cc-accc-be64b581e624 service nova] Releasing lock "refresh_cache-426efc79-924b-41bf-a890-2d27b4e862ef" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1633.609848] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic 
task ComputeManager._poll_volume_usage {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1635.610282] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1636.605278] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1636.608939] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1636.609175] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1636.609347] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69992) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1637.609522] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1637.609947] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1638.113496] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1638.113765] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1638.113901] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1638.114072] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69992) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1638.114997] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70848c4d-3a98-4f06-bf9b-ded07fb84455 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.123110] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74256791-de9f-4559-a565-4870fcca58bc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.136803] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e0e12d-1607-4cfa-8506-ffd1d9a1491f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.142589] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2f6917-e63c-4568-a17e-d8b6e1e44655 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.171702] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180730MB free_disk=161GB free_vcpus=48 pci_devices=None {{(pid=69992) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1638.171838] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1638.172048] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1639.197183] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 90facf1a-ae81-4259-bf75-94779267699c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1639.197503] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 426efc79-924b-41bf-a890-2d27b4e862ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1639.197551] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1639.197681] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1639.231945] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3698ab9a-0ff8-4906-aa75-9ab02080d583 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.239190] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-728d17c1-06de-4309-9939-ed546c93a8b6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.268170] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25fc906-c3a5-45d6-ba79-8076614ba6d2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.274726] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a23d32f6-f140-4482-b1d1-0da7c230477e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.287141] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1639.817284] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Updated inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 181 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1639.817511] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 181 to 182 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} 
[ 1639.817666] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1640.322478] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69992) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1640.322903] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.151s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1644.322102] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1645.458550] env[69992]: INFO nova.compute.manager [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Rebuilding instance [ 1645.496619] env[69992]: DEBUG nova.compute.manager [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1645.497532] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a6f5a9-6e2c-4403-81c1-a59c843c7bfc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.511059] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1646.511497] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f7527430-3da4-465c-80ba-7295ccc228c1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.518091] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1646.518091] env[69992]: value = "task-2898305" [ 1646.518091] env[69992]: _type = "Task" [ 1646.518091] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.525999] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898305, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.027924] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898305, 'name': PowerOffVM_Task, 'duration_secs': 0.175707} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.028223] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1647.028436] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1647.029239] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea62d9a8-78d7-41fa-934f-1f35a388d5e6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.035676] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1647.035906] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-18219181-0280-4993-8a14-999f07ec4133 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.102201] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1647.102458] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1647.102691] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Deleting the datastore file [datastore2] 426efc79-924b-41bf-a890-2d27b4e862ef {{(pid=69992) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1647.103018] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-09cb3928-6aee-40ec-80ee-a3290a3947af {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.109206] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1647.109206] env[69992]: value = "task-2898307" [ 1647.109206] env[69992]: _type = "Task" [ 1647.109206] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.116970] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898307, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.619528] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898307, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141407} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.619992] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1647.619992] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1647.620153] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1648.656517] env[69992]: DEBUG nova.virt.hardware [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1648.656791] env[69992]: DEBUG nova.virt.hardware [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1648.656927] env[69992]: DEBUG nova.virt.hardware [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1648.657121] env[69992]: DEBUG nova.virt.hardware [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1648.657271] env[69992]: DEBUG nova.virt.hardware [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1648.657426] env[69992]: DEBUG nova.virt.hardware [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1648.657639] env[69992]: DEBUG nova.virt.hardware [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1648.657800] env[69992]: DEBUG nova.virt.hardware [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1648.657970] env[69992]: DEBUG nova.virt.hardware [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1648.658152] env[69992]: DEBUG nova.virt.hardware [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1648.658331] env[69992]: DEBUG 
nova.virt.hardware [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1648.659221] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a14e5cf-62f6-468a-ae32-7d4df404daef {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.666853] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-520ddbaf-a005-428c-ac46-5d0306beae15 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.681159] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:44:dc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e1049e8-c06b-4c93-a9e1-2cbb530f3f95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c0418a66-6f0b-4419-985a-c6003fd83fe0', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1648.688507] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1648.688722] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1648.688922] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4035edc0-ce57-497b-a630-317920f3ddb2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.709671] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1648.709671] env[69992]: value = "task-2898308" [ 1648.709671] env[69992]: _type = "Task" [ 1648.709671] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.716833] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898308, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.219566] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898308, 'name': CreateVM_Task, 'duration_secs': 0.307204} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.219782] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1649.220505] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1649.220672] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1649.221060] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1649.221312] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14dd95f9-b1ea-4672-bbba-518f66d536c3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.225484] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1649.225484] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]524bc0ee-46d8-6362-6aba-7aac85253c65" [ 1649.225484] env[69992]: _type = "Task" [ 1649.225484] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.232538] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524bc0ee-46d8-6362-6aba-7aac85253c65, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.736049] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524bc0ee-46d8-6362-6aba-7aac85253c65, 'name': SearchDatastore_Task, 'duration_secs': 0.009851} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.736451] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1649.736621] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1649.736852] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1649.736999] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1649.737227] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1649.737499] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3961bde-a363-4290-8520-0deed78d7066 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.746658] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1649.746835] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1649.747585] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a594fee4-8e3c-41f2-965d-7d082aa6114f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.753122] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1649.753122] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52a577f4-097e-0c9d-ada0-b4daab864ef8" [ 1649.753122] env[69992]: _type = "Task" [ 1649.753122] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.760707] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a577f4-097e-0c9d-ada0-b4daab864ef8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.263450] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52a577f4-097e-0c9d-ada0-b4daab864ef8, 'name': SearchDatastore_Task, 'duration_secs': 0.009259} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.264201] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c55c17b-9b43-4ae3-a4a6-4ac437527c01 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.269149] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1650.269149] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]528876a2-a38d-9d20-94f7-161f33122cac" [ 1650.269149] env[69992]: _type = "Task" [ 1650.269149] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.276337] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528876a2-a38d-9d20-94f7-161f33122cac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.779612] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]528876a2-a38d-9d20-94f7-161f33122cac, 'name': SearchDatastore_Task, 'duration_secs': 0.011252} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.780016] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1650.780115] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 426efc79-924b-41bf-a890-2d27b4e862ef/426efc79-924b-41bf-a890-2d27b4e862ef.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1650.780365] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-527db7ff-a84a-45cb-af63-bf27798d7e44 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.786782] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1650.786782] env[69992]: value = "task-2898309" [ 1650.786782] env[69992]: _type = "Task" [ 1650.786782] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.794105] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898309, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.296662] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898309, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.449341} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.296923] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] 426efc79-924b-41bf-a890-2d27b4e862ef/426efc79-924b-41bf-a890-2d27b4e862ef.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1651.297115] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1651.297416] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-66662484-cba4-4b35-9927-c8cf4d3efa2d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.303468] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1651.303468] env[69992]: value = "task-2898310" [ 1651.303468] env[69992]: _type = "Task" [ 1651.303468] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.310895] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898310, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.813518] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898310, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061118} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.813923] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1651.814593] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f9508c-bfc9-4dfb-8b68-e920a98232d3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.837406] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] 426efc79-924b-41bf-a890-2d27b4e862ef/426efc79-924b-41bf-a890-2d27b4e862ef.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1651.837664] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6554dfd8-1533-4e3e-ae7c-83160b8a745b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.858098] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1651.858098] env[69992]: value = "task-2898311" [ 1651.858098] env[69992]: _type = "Task" [ 1651.858098] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.866119] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898311, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.368778] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898311, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.869034] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898311, 'name': ReconfigVM_Task, 'duration_secs': 0.650284} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.869034] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Reconfigured VM instance instance-0000007c to attach disk [datastore2] 426efc79-924b-41bf-a890-2d27b4e862ef/426efc79-924b-41bf-a890-2d27b4e862ef.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1652.869658] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d81d0a15-8e72-45cf-a1ce-1474ada4f2d9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.876230] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1652.876230] env[69992]: value = "task-2898312" [ 1652.876230] env[69992]: _type = "Task" [ 1652.876230] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.883535] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898312, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.385753] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898312, 'name': Rename_Task, 'duration_secs': 0.132165} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.386054] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1653.386321] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4aa76598-c47c-49b7-bc0c-9cbbce8664ff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.392873] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1653.392873] env[69992]: value = "task-2898313" [ 1653.392873] env[69992]: _type = "Task" [ 1653.392873] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.400398] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898313, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.903180] env[69992]: DEBUG oslo_vmware.api [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898313, 'name': PowerOnVM_Task, 'duration_secs': 0.43069} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.903600] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1653.903692] env[69992]: DEBUG nova.compute.manager [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1653.904475] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65b1223-ec00-4dec-ad36-d22a972d09b4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.421040] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1654.421129] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1654.421278] env[69992]: DEBUG nova.objects.instance [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69992) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1654.604920] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1655.429976] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4ec8e4eb-39b1-4f0b-a748-a1ed35ca4a90 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1690.669360] env[69992]: DEBUG oslo_concurrency.lockutils [None 
req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "426efc79-924b-41bf-a890-2d27b4e862ef" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1690.669745] env[69992]: DEBUG oslo_concurrency.lockutils [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "426efc79-924b-41bf-a890-2d27b4e862ef" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1690.669824] env[69992]: DEBUG oslo_concurrency.lockutils [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "426efc79-924b-41bf-a890-2d27b4e862ef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1690.670022] env[69992]: DEBUG oslo_concurrency.lockutils [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "426efc79-924b-41bf-a890-2d27b4e862ef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1690.670206] env[69992]: DEBUG oslo_concurrency.lockutils [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "426efc79-924b-41bf-a890-2d27b4e862ef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1690.672793] env[69992]: INFO nova.compute.manager [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Terminating instance [ 1691.177333] env[69992]: DEBUG nova.compute.manager [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Start destroying the instance on the hypervisor.
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1691.177585] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1691.178904] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2bedbf4-d559-4701-8cee-3e7640fd00d0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.186511] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1691.186732] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b561cbfc-6b62-449e-9ee6-973219b6d68a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.192505] env[69992]: DEBUG oslo_vmware.api [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1691.192505] env[69992]: value = "task-2898314" [ 1691.192505] env[69992]: _type = "Task" [ 1691.192505] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.200085] env[69992]: DEBUG oslo_vmware.api [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898314, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.702122] env[69992]: DEBUG oslo_vmware.api [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898314, 'name': PowerOffVM_Task, 'duration_secs': 0.185148} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.702529] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1691.702529] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1691.702789] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7a0a7793-c68c-4129-8fc5-d187869a916c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.915633] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1691.915904] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1691.916029] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Deleting the datastore file [datastore2] 426efc79-924b-41bf-a890-2d27b4e862ef {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1691.916309] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-49ca63c3-f192-425b-8d42-7f0bc480c8fa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.923921] env[69992]: DEBUG oslo_vmware.api [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1691.923921] env[69992]: value = "task-2898316" [ 1691.923921] env[69992]: _type = "Task" [ 1691.923921] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.931349] env[69992]: DEBUG oslo_vmware.api [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898316, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.434197] env[69992]: DEBUG oslo_vmware.api [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898316, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149493} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.434456] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1692.434642] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1692.434820] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1692.434997] env[69992]: INFO nova.compute.manager [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1692.435260] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1692.435448] env[69992]: DEBUG nova.compute.manager [-] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1692.435549] env[69992]: DEBUG nova.network.neutron [-] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1692.863875] env[69992]: DEBUG nova.compute.manager [req-f53146da-0ffa-454c-9f06-9bc8abac8787 req-4268aac6-ea9d-4539-8aac-6281bc5f3ffb service nova] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Received event network-vif-deleted-c0418a66-6f0b-4419-985a-c6003fd83fe0 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1692.864129] env[69992]: INFO nova.compute.manager [req-f53146da-0ffa-454c-9f06-9bc8abac8787 req-4268aac6-ea9d-4539-8aac-6281bc5f3ffb service nova] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Neutron deleted interface c0418a66-6f0b-4419-985a-c6003fd83fe0; detaching it from the instance and deleting it from the info cache [ 1692.864331] env[69992]: DEBUG nova.network.neutron [req-f53146da-0ffa-454c-9f06-9bc8abac8787 req-4268aac6-ea9d-4539-8aac-6281bc5f3ffb service nova] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1693.348468] env[69992]: DEBUG nova.network.neutron [-] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1693.368842] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d049d5d0-390b-4359-aa2a-9724620949e2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.381343] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfa4ab1e-6d10-43a5-925b-bf6e3f877226 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.410342] env[69992]: DEBUG nova.compute.manager [req-f53146da-0ffa-454c-9f06-9bc8abac8787 req-4268aac6-ea9d-4539-8aac-6281bc5f3ffb service nova] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Detach interface failed, port_id=c0418a66-6f0b-4419-985a-c6003fd83fe0, reason: Instance 426efc79-924b-41bf-a890-2d27b4e862ef could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1693.851408] env[69992]: INFO nova.compute.manager [-] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Took 1.42 seconds to deallocate network for instance. 
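[editor's note] The destroy sequence above (records [ 1691.186732] through [ 1692.434456]) follows the oslo.vmware pattern that recurs throughout this trace: a vCenter *_Task method is invoked through the SOAP client, then the caller blocks in wait_for_task while _poll_task emits the "progress is N%" records. A minimal sketch of that pattern follows; it is illustrative only (not Nova's own code), and the names session (an already-created oslo_vmware.api.VMwareAPISession) and vm_ref (a VM managed-object reference) are assumptions, not values taken from this log.

    # Illustrative sketch only -- mirrors the PowerOffVM_Task /
    # DeleteDatastoreFile_Task wait loops shown in the log, not Nova's code.
    from oslo_vmware import exceptions as vmware_exceptions

    def power_off_and_wait(session, vm_ref):
        # invoke_api() issues the SOAP call (here PowerOffVM_Task) through the
        # vim client and returns a task managed-object reference.
        task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        try:
            # wait_for_task() polls the task until it reaches 'success'
            # (the "progress is N%" records) and returns the final task info.
            return session.wait_for_task(task_ref)
        except vmware_exceptions.VimException:
            # The task ended in an error state; the exception carries the
            # vCenter fault message.
            raise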
[ 1694.357988] env[69992]: DEBUG oslo_concurrency.lockutils [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1694.357988] env[69992]: DEBUG oslo_concurrency.lockutils [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1694.357988] env[69992]: DEBUG nova.objects.instance [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lazy-loading 'resources' on Instance uuid 426efc79-924b-41bf-a890-2d27b4e862ef {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1694.609971] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1694.900772] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3fb0a4-6b2e-4980-910c-563a3f83d19a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.908128] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dd9257e-9592-4b24-9369-078cd42968c7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.937438] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d2e230-7eae-4722-a16d-8bc3589c9cc9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.944224] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deeeeba5-2122-4042-9867-76c1ef428d82 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.956653] env[69992]: DEBUG nova.compute.provider_tree [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1695.487265] env[69992]: DEBUG nova.scheduler.client.report [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Updated inventory for provider 
9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with generation 182 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1695.487627] env[69992]: DEBUG nova.compute.provider_tree [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Updating resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 generation from 182 to 183 during operation: update_inventory {{(pid=69992) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1695.487690] env[69992]: DEBUG nova.compute.provider_tree [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1695.993020] env[69992]: DEBUG oslo_concurrency.lockutils [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.635s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1696.012762] env[69992]: INFO nova.scheduler.client.report [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Deleted allocations for instance 426efc79-924b-41bf-a890-2d27b4e862ef [ 1696.520430] env[69992]: DEBUG oslo_concurrency.lockutils [None req-902e2b00-acb3-4888-9b1d-9a93ba8af278 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "426efc79-924b-41bf-a890-2d27b4e862ef" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.851s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1696.609911] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1696.610178] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1697.604907] env[69992]: DEBUG oslo_service.periodic_task [None
req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1697.610046] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1697.610046] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69992) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1697.610046] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1698.113348] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1698.113541] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1698.113610] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1698.113757] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69992) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1698.114644] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf7e5c4-e6d3-40c0-ad99-7177d66f206e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.123071] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b08b0c-dc22-4986-9501-68aafc7ab3f3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.136497] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cee1597-8cb3-46fa-8f85-269c861020cd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.142370] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dce5790-1579-4cf1-b98b-f65c40922488 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.171468] 
env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180690MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=69992) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1698.171592] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1698.171800] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1698.550240] env[69992]: DEBUG nova.compute.manager [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Stashing vm_state: active {{(pid=69992) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1699.097854] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1699.178190] env[69992]: INFO nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Updating resource usage from migration 53de7af0-9aac-4bf0-9e21-7a9092f0a80b [ 1699.194461] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Migration 53de7af0-9aac-4bf0-9e21-7a9092f0a80b is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1699.194461] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 90facf1a-ae81-4259-bf75-94779267699c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1699.194461] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1699.194461] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1699.227049] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71301464-545c-4ad3-b1dd-d4887ae8e7c6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.233902] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b526165-4d63-4dc6-a680-7cf3ce5bef3e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.262410] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8fe5c03-dabf-48da-88be-80cbcaf4dafc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.269046] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe51e63a-9c4b-45d9-8334-e4490c4b4c43 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.282412] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1699.785782] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1700.291340] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69992) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1700.291763] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.120s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1700.291827] env[69992]: DEBUG oslo_concurrency.lockutils [None 
req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.194s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1700.797705] env[69992]: INFO nova.compute.claims [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1701.293648] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1701.303538] env[69992]: INFO nova.compute.resource_tracker [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Updating resource usage from migration 53de7af0-9aac-4bf0-9e21-7a9092f0a80b [ 1701.338261] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d853fbd4-d5b4-4774-89c9-8a59b11ae11b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.345961] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e85d1d-c8c5-4198-8e9f-22824fea879e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.375270] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b9be86-7109-4f7e-9158-1e62394b885e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.381776] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1116f930-5730-4455-b9a3-054e7fac8841 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.394187] env[69992]: DEBUG nova.compute.provider_tree [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1701.897898] env[69992]: DEBUG nova.scheduler.client.report [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1702.404553] 
env[69992]: DEBUG oslo_concurrency.lockutils [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.113s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1702.405024] env[69992]: INFO nova.compute.manager [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Migrating [ 1702.609126] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1702.918628] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1702.918827] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1702.919022] env[69992]: DEBUG nova.network.neutron [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1703.610017] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1703.610347] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Cleaning up deleted instances {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1703.613379] env[69992]: DEBUG nova.network.neutron [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Updating instance_info_cache with network_info: [{"id": "0ec50d92-4ea0-44af-b9fd-14443de36a12", "address": "fa:16:3e:1a:4d:23", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": 
{"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ec50d92-4e", "ovs_interfaceid": "0ec50d92-4ea0-44af-b9fd-14443de36a12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1704.120072] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] There are 26 instances to clean {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1704.120072] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 426efc79-924b-41bf-a890-2d27b4e862ef] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1704.121173] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1704.623315] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 6df2b739-02c0-40ac-b2a2-14587e3996bf] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1705.129453] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: e7539d23-b4bb-48e8-89f4-ba98e6a12a01] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1705.634765] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 7b549cd4-bfdc-45c5-9031-9b378ad7ee79] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1705.640804] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e5cbb80-933d-4e76-95b6-6811d37d40cf {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.659725] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Updating instance '90facf1a-ae81-4259-bf75-94779267699c' progress to 0 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1706.140198] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: ea01d1b3-20e8-4dc6-afcc-cc6b7152ca6b] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1706.165482] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b 
tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1706.165974] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a60de8b-2ca7-4058-8f6e-82af9c00ac95 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.173633] env[69992]: DEBUG oslo_vmware.api [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1706.173633] env[69992]: value = "task-2898317" [ 1706.173633] env[69992]: _type = "Task" [ 1706.173633] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.181707] env[69992]: DEBUG oslo_vmware.api [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898317, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.643296] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 57cfbf0e-6a20-4a25-a5e1-8d35e2eaf7dc] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1706.683350] env[69992]: DEBUG oslo_vmware.api [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898317, 'name': PowerOffVM_Task, 'duration_secs': 0.178706} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.683599] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1706.683765] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Updating instance '90facf1a-ae81-4259-bf75-94779267699c' progress to 17 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1707.146759] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: d2b4482f-cc98-4e3d-9996-397f4f0b2ead] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1707.189172] env[69992]: DEBUG nova.virt.hardware [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1707.189429] env[69992]: DEBUG nova.virt.hardware [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1707.189598] env[69992]: DEBUG nova.virt.hardware [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1707.189777] env[69992]: DEBUG nova.virt.hardware [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1707.189923] env[69992]: DEBUG nova.virt.hardware [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1707.190088] env[69992]: DEBUG nova.virt.hardware [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1707.190299] env[69992]: DEBUG nova.virt.hardware [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1707.190458] env[69992]: DEBUG nova.virt.hardware [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1707.190627] env[69992]: DEBUG nova.virt.hardware [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1707.190791] env[69992]: DEBUG nova.virt.hardware [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1707.190964] env[69992]: DEBUG nova.virt.hardware [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1707.195994] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66a91c42-6974-4d6c-83bf-28f9afe7f59c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.211658] env[69992]: DEBUG oslo_vmware.api [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1707.211658] env[69992]: value = "task-2898318" [ 1707.211658] env[69992]: _type = "Task" [ 1707.211658] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.219471] env[69992]: DEBUG oslo_vmware.api [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898318, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.649755] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 7c8b830a-e89c-4d97-a987-141797aaa55f] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1707.722057] env[69992]: DEBUG oslo_vmware.api [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898318, 'name': ReconfigVM_Task, 'duration_secs': 0.161343} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.722057] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Updating instance '90facf1a-ae81-4259-bf75-94779267699c' progress to 33 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1708.152727] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 5df7d031-66bf-43eb-a05b-07b6cff9db59] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1708.228397] env[69992]: DEBUG nova.virt.hardware [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1708.228661] env[69992]: DEBUG nova.virt.hardware [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1708.228843] env[69992]: DEBUG nova.virt.hardware [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1708.229086] env[69992]: DEBUG nova.virt.hardware [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1708.229281] env[69992]: DEBUG nova.virt.hardware [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1708.229442] env[69992]: DEBUG nova.virt.hardware [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1708.229672] env[69992]: DEBUG nova.virt.hardware [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1708.229849] env[69992]: DEBUG nova.virt.hardware [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1708.230044] env[69992]: DEBUG nova.virt.hardware [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1708.230221] env[69992]: DEBUG nova.virt.hardware [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1708.230400] env[69992]: DEBUG nova.virt.hardware [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1708.235754] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Reconfiguring VM instance instance-00000077 to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1708.236060] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b43512dc-b038-4698-a3ff-a34ba8c2bee5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.255323] env[69992]: DEBUG oslo_vmware.api [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1708.255323] env[69992]: value = "task-2898319" [ 1708.255323] env[69992]: _type = "Task" [ 1708.255323] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.262924] env[69992]: DEBUG oslo_vmware.api [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898319, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.656094] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 9d290fe7-12d2-416e-9608-7a8e7e9b2f65] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1708.764880] env[69992]: DEBUG oslo_vmware.api [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898319, 'name': ReconfigVM_Task, 'duration_secs': 0.150098} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.765228] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Reconfigured VM instance instance-00000077 to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1708.765987] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf0161af-c7d4-489c-8576-9f007659dd74 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.788218] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 90facf1a-ae81-4259-bf75-94779267699c/90facf1a-ae81-4259-bf75-94779267699c.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1708.788439] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca6ec933-2c9f-4926-be28-eb7dd4d715a5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.805532] env[69992]: DEBUG oslo_vmware.api [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1708.805532] env[69992]: value = "task-2898320" [ 1708.805532] env[69992]: _type = "Task" [ 1708.805532] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.812795] env[69992]: DEBUG oslo_vmware.api [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898320, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.159376] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: c2268475-6506-4c1f-8f8a-7b8d3a5cb28c] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1709.315864] env[69992]: DEBUG oslo_vmware.api [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898320, 'name': ReconfigVM_Task, 'duration_secs': 0.248503} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.316145] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 90facf1a-ae81-4259-bf75-94779267699c/90facf1a-ae81-4259-bf75-94779267699c.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1709.316415] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Updating instance '90facf1a-ae81-4259-bf75-94779267699c' progress to 50 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1709.662489] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 904b8020-3060-4611-bdd4-650e288d69fd] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1709.823222] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f2856a-aadf-46f8-931b-c38d4097b7b9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.842185] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69234d69-b3c4-44be-a80f-d2806a29577e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.858686] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Updating instance '90facf1a-ae81-4259-bf75-94779267699c' progress to 67 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1710.165705] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: b7af455d-a3a7-480f-b778-9eb3724fa6f1] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1710.395959] env[69992]: DEBUG nova.network.neutron [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Port 0ec50d92-4ea0-44af-b9fd-14443de36a12 binding 
to destination host cpu-1 is already ACTIVE {{(pid=69992) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1710.668604] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: d50d7460-2b70-45bc-940f-7d45f329fa1c] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1711.172376] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: fc769b20-222e-4ff0-8ffd-7b24e4658b14] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1711.418145] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "90facf1a-ae81-4259-bf75-94779267699c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1711.418430] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "90facf1a-ae81-4259-bf75-94779267699c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1711.418581] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "90facf1a-ae81-4259-bf75-94779267699c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1711.676939] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: e9018928-5237-4ba1-8c18-9ff1ec64a79c] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1712.179642] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: d5a6a189-0a7d-49ba-acab-35a244cf76eb] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1712.452574] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1712.452837] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1712.452940] env[69992]: DEBUG nova.network.neutron [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b 
tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1712.682658] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 5c8b5f76-918a-44ac-b5b4-5f5f252da936] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1713.154931] env[69992]: DEBUG nova.network.neutron [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Updating instance_info_cache with network_info: [{"id": "0ec50d92-4ea0-44af-b9fd-14443de36a12", "address": "fa:16:3e:1a:4d:23", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ec50d92-4e", "ovs_interfaceid": "0ec50d92-4ea0-44af-b9fd-14443de36a12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1713.185443] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: fe3624b0-7d4a-4a16-83e3-3f28c2a74006] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1713.657904] env[69992]: DEBUG oslo_concurrency.lockutils [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1713.688451] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 9464339a-b760-47e9-bc75-e88ce18bf71b] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1714.182254] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99790b75-a567-41e4-aa03-6d2839e5d490 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.201446] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 
6ccc70f5-4857-4af3-99a1-f60ec35aebaf] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1714.203657] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc24563-82fe-4ece-af1f-642ef1156d29 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.210177] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Updating instance '90facf1a-ae81-4259-bf75-94779267699c' progress to 83 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1714.707838] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 25b0b8b6-a3b7-4f61-8070-bc11bbe9e3a7] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1714.715738] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1714.716032] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b0d4eda7-1853-4e34-abe4-b9a50ca621de {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.724392] env[69992]: DEBUG oslo_vmware.api [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1714.724392] env[69992]: value = "task-2898321" [ 1714.724392] env[69992]: _type = "Task" [ 1714.724392] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.731771] env[69992]: DEBUG oslo_vmware.api [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898321, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.210902] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 08869f38-9609-4f7f-9110-2f26fd1cb3f7] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1715.234082] env[69992]: DEBUG oslo_vmware.api [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898321, 'name': PowerOnVM_Task, 'duration_secs': 0.396618} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.234884] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1715.235081] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-e07c4d5e-13ec-43d1-8317-9031fee4e97b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Updating instance '90facf1a-ae81-4259-bf75-94779267699c' progress to 100 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1715.714418] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: e95e47c2-d82e-4153-8d16-7b65d992e91a] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1716.217582] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 88a3b1ab-d4ee-48c9-a0f2-da94e93f4cbb] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1716.720947] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: fcbe1142-72dc-4a02-af9b-e03a2031a247] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1717.224436] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1717.224669] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Cleaning up deleted instances with incomplete migration {{(pid=69992) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1717.636620] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71375394-a832-42df-b976-47ecf8a9a0dd tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "90facf1a-ae81-4259-bf75-94779267699c" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1717.636920] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71375394-a832-42df-b976-47ecf8a9a0dd tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "90facf1a-ae81-4259-bf75-94779267699c" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1717.637134] env[69992]: DEBUG nova.compute.manager [None req-71375394-a832-42df-b976-47ecf8a9a0dd tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Going to confirm migration 9 {{(pid=69992) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 
1717.727488] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1718.202681] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71375394-a832-42df-b976-47ecf8a9a0dd tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1718.202879] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71375394-a832-42df-b976-47ecf8a9a0dd tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1718.203068] env[69992]: DEBUG nova.network.neutron [None req-71375394-a832-42df-b976-47ecf8a9a0dd tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1718.203255] env[69992]: DEBUG nova.objects.instance [None req-71375394-a832-42df-b976-47ecf8a9a0dd tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lazy-loading 'info_cache' on Instance uuid 90facf1a-ae81-4259-bf75-94779267699c {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1719.607446] env[69992]: DEBUG nova.network.neutron [None req-71375394-a832-42df-b976-47ecf8a9a0dd tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Updating instance_info_cache with network_info: [{"id": "0ec50d92-4ea0-44af-b9fd-14443de36a12", "address": "fa:16:3e:1a:4d:23", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ec50d92-4e", "ovs_interfaceid": "0ec50d92-4ea0-44af-b9fd-14443de36a12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1720.110108] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71375394-a832-42df-b976-47ecf8a9a0dd 
tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "refresh_cache-90facf1a-ae81-4259-bf75-94779267699c" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1720.110395] env[69992]: DEBUG nova.objects.instance [None req-71375394-a832-42df-b976-47ecf8a9a0dd tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lazy-loading 'migration_context' on Instance uuid 90facf1a-ae81-4259-bf75-94779267699c {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1720.613566] env[69992]: DEBUG nova.objects.base [None req-71375394-a832-42df-b976-47ecf8a9a0dd tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Object Instance<90facf1a-ae81-4259-bf75-94779267699c> lazy-loaded attributes: info_cache,migration_context {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1720.614551] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fbcb102-76c8-4e3e-bff2-593b078dd903 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.633545] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-490d48bb-a3c4-407b-af28-634ee7f1d8c4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.638415] env[69992]: DEBUG oslo_vmware.api [None req-71375394-a832-42df-b976-47ecf8a9a0dd tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1720.638415] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52af49a7-4d4a-1b72-2381-478cec93be01" [ 1720.638415] env[69992]: _type = "Task" [ 1720.638415] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.645509] env[69992]: DEBUG oslo_vmware.api [None req-71375394-a832-42df-b976-47ecf8a9a0dd tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52af49a7-4d4a-1b72-2381-478cec93be01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.148546] env[69992]: DEBUG oslo_vmware.api [None req-71375394-a832-42df-b976-47ecf8a9a0dd tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52af49a7-4d4a-1b72-2381-478cec93be01, 'name': SearchDatastore_Task, 'duration_secs': 0.008713} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.148821] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71375394-a832-42df-b976-47ecf8a9a0dd tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1721.149076] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71375394-a832-42df-b976-47ecf8a9a0dd tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1721.692883] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3691fd2d-d2ee-4789-8cd9-d14f4337b5e4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.701267] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7563a93b-a840-4797-b031-c66438d2aa74 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.730403] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b761527-2946-45ef-aab1-34b92fd6a96c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.736907] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb76bdf9-83ce-4d05-ac78-bd090bd1d3f9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.749362] env[69992]: DEBUG nova.compute.provider_tree [None req-71375394-a832-42df-b976-47ecf8a9a0dd tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1722.252788] env[69992]: DEBUG nova.scheduler.client.report [None req-71375394-a832-42df-b976-47ecf8a9a0dd tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1723.263262] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71375394-a832-42df-b976-47ecf8a9a0dd tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.114s {{(pid=69992) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1723.822103] env[69992]: INFO nova.scheduler.client.report [None req-71375394-a832-42df-b976-47ecf8a9a0dd tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Deleted allocation for migration 53de7af0-9aac-4bf0-9e21-7a9092f0a80b [ 1724.328212] env[69992]: DEBUG oslo_concurrency.lockutils [None req-71375394-a832-42df-b976-47ecf8a9a0dd tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "90facf1a-ae81-4259-bf75-94779267699c" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.691s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1724.541372] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "90facf1a-ae81-4259-bf75-94779267699c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1724.541632] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "90facf1a-ae81-4259-bf75-94779267699c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1724.541886] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "90facf1a-ae81-4259-bf75-94779267699c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1724.542089] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "90facf1a-ae81-4259-bf75-94779267699c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1724.542265] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "90facf1a-ae81-4259-bf75-94779267699c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1724.544423] env[69992]: INFO nova.compute.manager [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Terminating instance [ 1725.048867] env[69992]: DEBUG nova.compute.manager [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 
tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1725.049150] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1725.050027] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f441da0-635a-4584-bdd4-8fa3662c9f31 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.057644] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1725.057858] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9377ffcf-9cca-4425-a820-a788753ad8ff {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.064164] env[69992]: DEBUG oslo_vmware.api [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1725.064164] env[69992]: value = "task-2898322" [ 1725.064164] env[69992]: _type = "Task" [ 1725.064164] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.071769] env[69992]: DEBUG oslo_vmware.api [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898322, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.574676] env[69992]: DEBUG oslo_vmware.api [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898322, 'name': PowerOffVM_Task, 'duration_secs': 0.21339} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.575129] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1725.575129] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1725.575276] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a467de4b-8e68-40f0-98a7-954d5740426d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.649092] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1725.649371] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1725.649527] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Deleting the datastore file [datastore1] 90facf1a-ae81-4259-bf75-94779267699c {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1725.649794] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-277706c5-5797-4d97-9482-52f5940d48e9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.655774] env[69992]: DEBUG oslo_vmware.api [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1725.655774] env[69992]: value = "task-2898324" [ 1725.655774] env[69992]: _type = "Task" [ 1725.655774] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.663219] env[69992]: DEBUG oslo_vmware.api [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898324, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.165300] env[69992]: DEBUG oslo_vmware.api [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898324, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127927} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.165561] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1726.165754] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1726.165949] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1726.166145] env[69992]: INFO nova.compute.manager [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1726.166389] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1726.166579] env[69992]: DEBUG nova.compute.manager [-] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1726.166674] env[69992]: DEBUG nova.network.neutron [-] [instance: 90facf1a-ae81-4259-bf75-94779267699c] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1726.592828] env[69992]: DEBUG nova.compute.manager [req-1d8b2445-5459-4895-b575-cf77c7c5a557 req-cdf5a4c3-face-4cd9-88bb-3a94c9641ef5 service nova] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Received event network-vif-deleted-0ec50d92-4ea0-44af-b9fd-14443de36a12 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1726.593138] env[69992]: INFO nova.compute.manager [req-1d8b2445-5459-4895-b575-cf77c7c5a557 req-cdf5a4c3-face-4cd9-88bb-3a94c9641ef5 service nova] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Neutron deleted interface 0ec50d92-4ea0-44af-b9fd-14443de36a12; detaching it from the instance and deleting it from the info cache [ 1726.593334] env[69992]: DEBUG nova.network.neutron [req-1d8b2445-5459-4895-b575-cf77c7c5a557 req-cdf5a4c3-face-4cd9-88bb-3a94c9641ef5 service nova] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1727.070555] env[69992]: DEBUG nova.network.neutron [-] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1727.095972] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1e438694-8dc0-42fc-a778-cd3fe4698e2d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.106728] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-434aa893-0bb9-4c29-b64a-832f4003e1b4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.131934] env[69992]: DEBUG nova.compute.manager [req-1d8b2445-5459-4895-b575-cf77c7c5a557 req-cdf5a4c3-face-4cd9-88bb-3a94c9641ef5 service nova] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Detach interface failed, port_id=0ec50d92-4ea0-44af-b9fd-14443de36a12, reason: Instance 90facf1a-ae81-4259-bf75-94779267699c could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1727.574221] env[69992]: INFO nova.compute.manager [-] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Took 1.41 seconds to deallocate network for instance. 
[ 1728.080507] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1728.080927] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1728.080982] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1728.102233] env[69992]: INFO nova.scheduler.client.report [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Deleted allocations for instance 90facf1a-ae81-4259-bf75-94779267699c [ 1728.609887] env[69992]: DEBUG oslo_concurrency.lockutils [None req-7bdf9681-8e37-4454-8e46-b94414a48ac0 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "90facf1a-ae81-4259-bf75-94779267699c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.068s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1729.453260] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "2359591e-4149-4594-bcd8-55cb74d1da24" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1729.453570] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "2359591e-4149-4594-bcd8-55cb74d1da24" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1729.955469] env[69992]: DEBUG nova.compute.manager [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1730.474454] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1730.474723] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1730.476145] env[69992]: INFO nova.compute.claims [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1731.510304] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b284bb-014d-4086-a785-e30c2f9b5a1b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.517507] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873c52d5-b045-4f5d-8e46-e57249064288 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.547750] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12dc74f5-8ba2-4559-a8c5-1460bb5f576f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.554482] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b5169d7-55be-4bbc-8fef-72bff79e05f6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.567563] env[69992]: DEBUG nova.compute.provider_tree [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1732.070705] env[69992]: DEBUG nova.scheduler.client.report [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1732.575974] env[69992]: DEBUG oslo_concurrency.lockutils [None 
req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.101s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1732.576567] env[69992]: DEBUG nova.compute.manager [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1733.081670] env[69992]: DEBUG nova.compute.utils [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1733.083173] env[69992]: DEBUG nova.compute.manager [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1733.083361] env[69992]: DEBUG nova.network.neutron [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1733.129125] env[69992]: DEBUG nova.policy [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '99bd7545f7d04aa28e625ce6c5491bb6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '122bc9ffa8f54a34af6047517fab0a9a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1733.399207] env[69992]: DEBUG nova.network.neutron [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Successfully created port: da930938-8ab4-484a-9207-57babddf0038 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1733.586967] env[69992]: DEBUG nova.compute.manager [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1734.597602] env[69992]: DEBUG nova.compute.manager [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1734.626205] env[69992]: DEBUG nova.virt.hardware [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1734.626496] env[69992]: DEBUG nova.virt.hardware [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1734.626689] env[69992]: DEBUG nova.virt.hardware [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1734.626878] env[69992]: DEBUG nova.virt.hardware [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1734.627051] env[69992]: DEBUG nova.virt.hardware [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1734.627205] env[69992]: DEBUG nova.virt.hardware [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1734.627418] env[69992]: DEBUG nova.virt.hardware [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
1734.627579] env[69992]: DEBUG nova.virt.hardware [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1734.627745] env[69992]: DEBUG nova.virt.hardware [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1734.627907] env[69992]: DEBUG nova.virt.hardware [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1734.628108] env[69992]: DEBUG nova.virt.hardware [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1734.628980] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02998e23-50f9-4b14-8802-a7c820391132 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.637225] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06ed4743-cc61-4175-bf52-a473d2a2d221 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.753510] env[69992]: DEBUG nova.compute.manager [req-be7336ea-4762-4d83-a526-5443fcde39a4 req-8c634dbc-f68a-452c-ac6d-378228bee7b1 service nova] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Received event network-vif-plugged-da930938-8ab4-484a-9207-57babddf0038 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1734.753732] env[69992]: DEBUG oslo_concurrency.lockutils [req-be7336ea-4762-4d83-a526-5443fcde39a4 req-8c634dbc-f68a-452c-ac6d-378228bee7b1 service nova] Acquiring lock "2359591e-4149-4594-bcd8-55cb74d1da24-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1734.753944] env[69992]: DEBUG oslo_concurrency.lockutils [req-be7336ea-4762-4d83-a526-5443fcde39a4 req-8c634dbc-f68a-452c-ac6d-378228bee7b1 service nova] Lock "2359591e-4149-4594-bcd8-55cb74d1da24-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1734.754137] env[69992]: DEBUG oslo_concurrency.lockutils [req-be7336ea-4762-4d83-a526-5443fcde39a4 req-8c634dbc-f68a-452c-ac6d-378228bee7b1 service nova] Lock "2359591e-4149-4594-bcd8-55cb74d1da24-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1734.754402] env[69992]: DEBUG nova.compute.manager 
[req-be7336ea-4762-4d83-a526-5443fcde39a4 req-8c634dbc-f68a-452c-ac6d-378228bee7b1 service nova] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] No waiting events found dispatching network-vif-plugged-da930938-8ab4-484a-9207-57babddf0038 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1734.754520] env[69992]: WARNING nova.compute.manager [req-be7336ea-4762-4d83-a526-5443fcde39a4 req-8c634dbc-f68a-452c-ac6d-378228bee7b1 service nova] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Received unexpected event network-vif-plugged-da930938-8ab4-484a-9207-57babddf0038 for instance with vm_state building and task_state spawning. [ 1734.844458] env[69992]: DEBUG nova.network.neutron [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Successfully updated port: da930938-8ab4-484a-9207-57babddf0038 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1735.346311] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "refresh_cache-2359591e-4149-4594-bcd8-55cb74d1da24" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1735.346514] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "refresh_cache-2359591e-4149-4594-bcd8-55cb74d1da24" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1735.346766] env[69992]: DEBUG nova.network.neutron [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1735.907624] env[69992]: DEBUG nova.network.neutron [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1736.023664] env[69992]: DEBUG nova.network.neutron [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Updating instance_info_cache with network_info: [{"id": "da930938-8ab4-484a-9207-57babddf0038", "address": "fa:16:3e:5b:23:71", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda930938-8a", "ovs_interfaceid": "da930938-8ab4-484a-9207-57babddf0038", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1736.526713] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "refresh_cache-2359591e-4149-4594-bcd8-55cb74d1da24" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1736.527073] env[69992]: DEBUG nova.compute.manager [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Instance network_info: |[{"id": "da930938-8ab4-484a-9207-57babddf0038", "address": "fa:16:3e:5b:23:71", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda930938-8a", "ovs_interfaceid": "da930938-8ab4-484a-9207-57babddf0038", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1736.527540] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:23:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e1049e8-c06b-4c93-a9e1-2cbb530f3f95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da930938-8ab4-484a-9207-57babddf0038', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1736.534947] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1736.535198] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1736.535460] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f1d62635-dd91-4d12-9617-1104d8223bf9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.557118] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1736.557118] env[69992]: value = "task-2898325" [ 1736.557118] env[69992]: _type = "Task" [ 1736.557118] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.564617] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898325, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.782420] env[69992]: DEBUG nova.compute.manager [req-8b09cb8b-6df1-41d6-8a14-080f51a81b48 req-f4759c2a-01db-41b1-b794-e4b8d2c643cd service nova] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Received event network-changed-da930938-8ab4-484a-9207-57babddf0038 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1736.782647] env[69992]: DEBUG nova.compute.manager [req-8b09cb8b-6df1-41d6-8a14-080f51a81b48 req-f4759c2a-01db-41b1-b794-e4b8d2c643cd service nova] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Refreshing instance network info cache due to event network-changed-da930938-8ab4-484a-9207-57babddf0038. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1736.782906] env[69992]: DEBUG oslo_concurrency.lockutils [req-8b09cb8b-6df1-41d6-8a14-080f51a81b48 req-f4759c2a-01db-41b1-b794-e4b8d2c643cd service nova] Acquiring lock "refresh_cache-2359591e-4149-4594-bcd8-55cb74d1da24" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1736.783070] env[69992]: DEBUG oslo_concurrency.lockutils [req-8b09cb8b-6df1-41d6-8a14-080f51a81b48 req-f4759c2a-01db-41b1-b794-e4b8d2c643cd service nova] Acquired lock "refresh_cache-2359591e-4149-4594-bcd8-55cb74d1da24" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1736.783235] env[69992]: DEBUG nova.network.neutron [req-8b09cb8b-6df1-41d6-8a14-080f51a81b48 req-f4759c2a-01db-41b1-b794-e4b8d2c643cd service nova] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Refreshing network info cache for port da930938-8ab4-484a-9207-57babddf0038 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1737.066576] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898325, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.484893] env[69992]: DEBUG nova.network.neutron [req-8b09cb8b-6df1-41d6-8a14-080f51a81b48 req-f4759c2a-01db-41b1-b794-e4b8d2c643cd service nova] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Updated VIF entry in instance network info cache for port da930938-8ab4-484a-9207-57babddf0038. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1737.485301] env[69992]: DEBUG nova.network.neutron [req-8b09cb8b-6df1-41d6-8a14-080f51a81b48 req-f4759c2a-01db-41b1-b794-e4b8d2c643cd service nova] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Updating instance_info_cache with network_info: [{"id": "da930938-8ab4-484a-9207-57babddf0038", "address": "fa:16:3e:5b:23:71", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda930938-8a", "ovs_interfaceid": "da930938-8ab4-484a-9207-57babddf0038", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1737.567268] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898325, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.987982] env[69992]: DEBUG oslo_concurrency.lockutils [req-8b09cb8b-6df1-41d6-8a14-080f51a81b48 req-f4759c2a-01db-41b1-b794-e4b8d2c643cd service nova] Releasing lock "refresh_cache-2359591e-4149-4594-bcd8-55cb74d1da24" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1738.067972] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898325, 'name': CreateVM_Task, 'duration_secs': 1.313828} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.068300] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1738.068793] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1738.068974] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1738.069323] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1738.069567] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5eb54342-7094-4bfb-be16-0a8ac5afeaf5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.073999] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1738.073999] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52f4c427-40fd-8b59-6b0a-74a1d5e1608a" [ 1738.073999] env[69992]: _type = "Task" [ 1738.073999] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.083790] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52f4c427-40fd-8b59-6b0a-74a1d5e1608a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.585422] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52f4c427-40fd-8b59-6b0a-74a1d5e1608a, 'name': SearchDatastore_Task, 'duration_secs': 0.011176} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.585704] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1738.585930] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1738.586185] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1738.586338] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1738.586512] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1738.586756] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0d0915ec-1dbb-4637-ae69-0ebc6810ebda {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.595412] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1738.595642] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1738.596260] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-359113c0-29d9-48ea-ac75-e3a572677bc1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.600883] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1738.600883] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52c96df2-7562-b1ea-89a5-ce8193cc9e14" [ 1738.600883] env[69992]: _type = "Task" [ 1738.600883] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.608115] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c96df2-7562-b1ea-89a5-ce8193cc9e14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.111774] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52c96df2-7562-b1ea-89a5-ce8193cc9e14, 'name': SearchDatastore_Task, 'duration_secs': 0.007791} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.112569] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07938709-9b6c-4947-bd34-eb6f56b2c2e0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.117222] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1739.117222] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]529cf197-da8a-d997-7d67-cfe266fa2dba" [ 1739.117222] env[69992]: _type = "Task" [ 1739.117222] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.124476] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]529cf197-da8a-d997-7d67-cfe266fa2dba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.627582] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]529cf197-da8a-d997-7d67-cfe266fa2dba, 'name': SearchDatastore_Task, 'duration_secs': 0.00923} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.627828] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1739.628094] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 2359591e-4149-4594-bcd8-55cb74d1da24/2359591e-4149-4594-bcd8-55cb74d1da24.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1739.628347] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a13912db-cc54-4938-9d5e-b3d298d8bb97 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.634696] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1739.634696] env[69992]: value = "task-2898326" [ 1739.634696] env[69992]: _type = "Task" [ 1739.634696] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.642037] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898326, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.144428] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898326, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.431817} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.144755] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] 2359591e-4149-4594-bcd8-55cb74d1da24/2359591e-4149-4594-bcd8-55cb74d1da24.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1740.144925] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1740.145149] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c1f231a9-0679-4e9a-a7c1-e3ec093d9b61 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.151628] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1740.151628] env[69992]: value = "task-2898327" [ 1740.151628] env[69992]: _type = "Task" [ 1740.151628] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.158437] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898327, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.662095] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898327, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063529} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.662382] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1740.663133] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b65564e7-554e-4bd7-9d66-f955d7b957bd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.684626] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 2359591e-4149-4594-bcd8-55cb74d1da24/2359591e-4149-4594-bcd8-55cb74d1da24.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1740.684861] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-831ec9b5-9a7d-4258-92ec-bfd59052acef {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.704817] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1740.704817] env[69992]: value = "task-2898328" [ 1740.704817] env[69992]: _type = "Task" [ 1740.704817] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.712058] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898328, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.215460] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898328, 'name': ReconfigVM_Task, 'duration_secs': 0.288942} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.215873] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 2359591e-4149-4594-bcd8-55cb74d1da24/2359591e-4149-4594-bcd8-55cb74d1da24.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1741.216437] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-128885ba-a3fc-4e60-b741-907ec9ecf537 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.222745] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1741.222745] env[69992]: value = "task-2898329" [ 1741.222745] env[69992]: _type = "Task" [ 1741.222745] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.229962] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898329, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.731931] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898329, 'name': Rename_Task, 'duration_secs': 0.132001} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.732229] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1741.732496] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-410de6f0-4814-40e4-abc0-01d6c6f20477 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.739489] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1741.739489] env[69992]: value = "task-2898330" [ 1741.739489] env[69992]: _type = "Task" [ 1741.739489] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.747331] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898330, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.249608] env[69992]: DEBUG oslo_vmware.api [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898330, 'name': PowerOnVM_Task, 'duration_secs': 0.439937} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.249946] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1742.250060] env[69992]: INFO nova.compute.manager [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Took 7.65 seconds to spawn the instance on the hypervisor. [ 1742.250244] env[69992]: DEBUG nova.compute.manager [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1742.250997] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9155eed8-6c05-4d74-a70c-64d5646ea0cc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.768614] env[69992]: INFO nova.compute.manager [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Took 12.31 seconds to build instance. [ 1743.271235] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f1f09a51-5222-48f9-bb55-7c68dac7a1b2 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "2359591e-4149-4594-bcd8-55cb74d1da24" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.818s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1743.483103] env[69992]: DEBUG nova.compute.manager [req-1a9127aa-fcbe-49bd-8b5e-50765258ea9a req-13d67139-6698-4a94-86b5-f65ac35f7a9d service nova] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Received event network-changed-da930938-8ab4-484a-9207-57babddf0038 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1743.483103] env[69992]: DEBUG nova.compute.manager [req-1a9127aa-fcbe-49bd-8b5e-50765258ea9a req-13d67139-6698-4a94-86b5-f65ac35f7a9d service nova] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Refreshing instance network info cache due to event network-changed-da930938-8ab4-484a-9207-57babddf0038. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1743.483103] env[69992]: DEBUG oslo_concurrency.lockutils [req-1a9127aa-fcbe-49bd-8b5e-50765258ea9a req-13d67139-6698-4a94-86b5-f65ac35f7a9d service nova] Acquiring lock "refresh_cache-2359591e-4149-4594-bcd8-55cb74d1da24" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1743.483103] env[69992]: DEBUG oslo_concurrency.lockutils [req-1a9127aa-fcbe-49bd-8b5e-50765258ea9a req-13d67139-6698-4a94-86b5-f65ac35f7a9d service nova] Acquired lock "refresh_cache-2359591e-4149-4594-bcd8-55cb74d1da24" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1743.483103] env[69992]: DEBUG nova.network.neutron [req-1a9127aa-fcbe-49bd-8b5e-50765258ea9a req-13d67139-6698-4a94-86b5-f65ac35f7a9d service nova] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Refreshing network info cache for port da930938-8ab4-484a-9207-57babddf0038 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1744.206008] env[69992]: DEBUG nova.network.neutron [req-1a9127aa-fcbe-49bd-8b5e-50765258ea9a req-13d67139-6698-4a94-86b5-f65ac35f7a9d service nova] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Updated VIF entry in instance network info cache for port da930938-8ab4-484a-9207-57babddf0038. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1744.206408] env[69992]: DEBUG nova.network.neutron [req-1a9127aa-fcbe-49bd-8b5e-50765258ea9a req-13d67139-6698-4a94-86b5-f65ac35f7a9d service nova] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Updating instance_info_cache with network_info: [{"id": "da930938-8ab4-484a-9207-57babddf0038", "address": "fa:16:3e:5b:23:71", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda930938-8a", "ovs_interfaceid": "da930938-8ab4-484a-9207-57babddf0038", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1744.709369] env[69992]: DEBUG oslo_concurrency.lockutils [req-1a9127aa-fcbe-49bd-8b5e-50765258ea9a req-13d67139-6698-4a94-86b5-f65ac35f7a9d service nova] Releasing lock "refresh_cache-2359591e-4149-4594-bcd8-55cb74d1da24" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1771.228492] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic 
task ComputeManager._check_instance_build_time {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1771.228863] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1771.228900] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1771.229059] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1771.229230] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1771.229379] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1771.229526] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1771.229669] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69992) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1771.229815] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1771.732528] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1771.732832] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1771.733030] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1771.733195] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69992) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1771.734567] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff243a0-3950-4780-acd6-937e336233e5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.742577] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f05b66-1514-4d6c-ad7c-430c36a723ca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.757098] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7138d7f4-26f2-44b6-9b4a-9e7ac87a83f4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.762959] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbeb2145-2584-497c-816a-0c7f2f619070 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.791126] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181097MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=69992) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1771.791268] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
1771.791478] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1772.819073] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance 2359591e-4149-4594-bcd8-55cb74d1da24 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1772.819321] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1772.819440] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1772.845258] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d50db6-4b4a-42bf-b403-9c9c278ee478 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.852351] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda87a2e-d7fb-4aa2-910b-caadc7069780 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.884208] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c44b8c-cd67-4f09-9d4f-c14358a28982 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.892301] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b97a0b-b87b-4a25-84dc-0425b25f62e2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.905863] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1773.408722] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1773.913670] env[69992]: DEBUG nova.compute.resource_tracker [None 
req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69992) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1773.914036] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.122s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1779.291076] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1781.155902] env[69992]: DEBUG nova.compute.manager [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Stashing vm_state: active {{(pid=69992) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1781.675954] env[69992]: DEBUG oslo_concurrency.lockutils [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1781.676241] env[69992]: DEBUG oslo_concurrency.lockutils [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1782.181788] env[69992]: INFO nova.compute.claims [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1782.687498] env[69992]: INFO nova.compute.resource_tracker [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Updating resource usage from migration a5ee268c-d62b-43c9-844f-ac3e3235d2d5 [ 1782.723287] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4496dd4-072d-40a8-acd8-d0241371a23f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.731716] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4492354d-71ad-413c-bc1a-543f5b3abedb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.760695] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a4a7b7-d5d5-491a-85fe-3ead08caae22 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1782.767573] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7978595c-fe8e-4e23-940b-e1c2910b43f6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.780151] env[69992]: DEBUG nova.compute.provider_tree [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1783.283910] env[69992]: DEBUG nova.scheduler.client.report [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1783.788799] env[69992]: DEBUG oslo_concurrency.lockutils [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.112s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1783.789087] env[69992]: INFO nova.compute.manager [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Migrating [ 1784.303153] env[69992]: DEBUG oslo_concurrency.lockutils [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "refresh_cache-2359591e-4149-4594-bcd8-55cb74d1da24" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1784.303558] env[69992]: DEBUG oslo_concurrency.lockutils [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "refresh_cache-2359591e-4149-4594-bcd8-55cb74d1da24" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1784.303558] env[69992]: DEBUG nova.network.neutron [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1784.999545] env[69992]: DEBUG nova.network.neutron [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Updating instance_info_cache with network_info: [{"id": 
"da930938-8ab4-484a-9207-57babddf0038", "address": "fa:16:3e:5b:23:71", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda930938-8a", "ovs_interfaceid": "da930938-8ab4-484a-9207-57babddf0038", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1785.502744] env[69992]: DEBUG oslo_concurrency.lockutils [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "refresh_cache-2359591e-4149-4594-bcd8-55cb74d1da24" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1787.016846] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253a5f96-8c84-42cf-aa24-9124cbf44573 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.035645] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Updating instance '2359591e-4149-4594-bcd8-55cb74d1da24' progress to 0 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1787.541858] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1787.542173] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87386222-0819-4246-81fa-d14127f5b144 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.549750] env[69992]: DEBUG oslo_vmware.api [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1787.549750] env[69992]: value = "task-2898331" [ 1787.549750] env[69992]: _type = "Task" [ 1787.549750] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.557613] env[69992]: DEBUG oslo_vmware.api [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898331, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.059535] env[69992]: DEBUG oslo_vmware.api [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898331, 'name': PowerOffVM_Task, 'duration_secs': 0.193018} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.059866] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1788.059902] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Updating instance '2359591e-4149-4594-bcd8-55cb74d1da24' progress to 17 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1788.566825] env[69992]: DEBUG nova.virt.hardware [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1788.567117] env[69992]: DEBUG nova.virt.hardware [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1788.567331] env[69992]: DEBUG nova.virt.hardware [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1788.567554] env[69992]: DEBUG nova.virt.hardware [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1788.567706] env[69992]: DEBUG nova.virt.hardware [None 
req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1788.567855] env[69992]: DEBUG nova.virt.hardware [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1788.568074] env[69992]: DEBUG nova.virt.hardware [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1788.568238] env[69992]: DEBUG nova.virt.hardware [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1788.568405] env[69992]: DEBUG nova.virt.hardware [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1788.568567] env[69992]: DEBUG nova.virt.hardware [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1788.568735] env[69992]: DEBUG nova.virt.hardware [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1788.573907] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5cdc452d-12b6-4103-a23e-47864c43eb0f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.589977] env[69992]: DEBUG oslo_vmware.api [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1788.589977] env[69992]: value = "task-2898332" [ 1788.589977] env[69992]: _type = "Task" [ 1788.589977] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.598157] env[69992]: DEBUG oslo_vmware.api [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898332, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.100202] env[69992]: DEBUG oslo_vmware.api [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898332, 'name': ReconfigVM_Task, 'duration_secs': 0.156543} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.100512] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Updating instance '2359591e-4149-4594-bcd8-55cb74d1da24' progress to 33 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1789.607100] env[69992]: DEBUG nova.virt.hardware [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1789.607359] env[69992]: DEBUG nova.virt.hardware [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1789.607499] env[69992]: DEBUG nova.virt.hardware [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1789.607680] env[69992]: DEBUG nova.virt.hardware [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1789.607828] env[69992]: DEBUG nova.virt.hardware [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1789.607977] env[69992]: DEBUG nova.virt.hardware [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1789.608204] env[69992]: DEBUG nova.virt.hardware [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a 
tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1789.608364] env[69992]: DEBUG nova.virt.hardware [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1789.608533] env[69992]: DEBUG nova.virt.hardware [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1789.608696] env[69992]: DEBUG nova.virt.hardware [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1789.608870] env[69992]: DEBUG nova.virt.hardware [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1789.614111] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Reconfiguring VM instance instance-0000007d to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1789.614391] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1bc846ab-123a-4530-913e-9868ab7ba9a5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.634972] env[69992]: DEBUG oslo_vmware.api [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1789.634972] env[69992]: value = "task-2898333" [ 1789.634972] env[69992]: _type = "Task" [ 1789.634972] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.642415] env[69992]: DEBUG oslo_vmware.api [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898333, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.144716] env[69992]: DEBUG oslo_vmware.api [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898333, 'name': ReconfigVM_Task, 'duration_secs': 0.163917} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.146027] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Reconfigured VM instance instance-0000007d to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1790.146215] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ef9c06-bab1-4116-9b49-5d4f06548084 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.167473] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 2359591e-4149-4594-bcd8-55cb74d1da24/2359591e-4149-4594-bcd8-55cb74d1da24.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1790.167956] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac951542-3025-4228-a42e-05c8ce7b05bb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.184955] env[69992]: DEBUG oslo_vmware.api [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1790.184955] env[69992]: value = "task-2898334" [ 1790.184955] env[69992]: _type = "Task" [ 1790.184955] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.191927] env[69992]: DEBUG oslo_vmware.api [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898334, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.694518] env[69992]: DEBUG oslo_vmware.api [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898334, 'name': ReconfigVM_Task, 'duration_secs': 0.253543} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.696059] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 2359591e-4149-4594-bcd8-55cb74d1da24/2359591e-4149-4594-bcd8-55cb74d1da24.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1790.696059] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Updating instance '2359591e-4149-4594-bcd8-55cb74d1da24' progress to 50 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1791.201428] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed55f216-f8ee-42b9-b255-0e9c0ea1c017 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.220587] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb10f25e-e8b4-49b3-af5e-3055541205b4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.237926] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Updating instance '2359591e-4149-4594-bcd8-55cb74d1da24' progress to 67 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1791.776097] env[69992]: DEBUG nova.network.neutron [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Port da930938-8ab4-484a-9207-57babddf0038 binding to destination host cpu-1 is already ACTIVE {{(pid=69992) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1792.797537] env[69992]: DEBUG oslo_concurrency.lockutils [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "2359591e-4149-4594-bcd8-55cb74d1da24-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1792.797892] env[69992]: DEBUG oslo_concurrency.lockutils [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "2359591e-4149-4594-bcd8-55cb74d1da24-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1792.797931] env[69992]: DEBUG oslo_concurrency.lockutils [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 
tempest-ServerActionsTestJSON-1992076937-project-member] Lock "2359591e-4149-4594-bcd8-55cb74d1da24-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1793.848514] env[69992]: DEBUG oslo_concurrency.lockutils [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "refresh_cache-2359591e-4149-4594-bcd8-55cb74d1da24" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.848746] env[69992]: DEBUG oslo_concurrency.lockutils [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "refresh_cache-2359591e-4149-4594-bcd8-55cb74d1da24" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1793.848886] env[69992]: DEBUG nova.network.neutron [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1794.564915] env[69992]: DEBUG nova.network.neutron [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Updating instance_info_cache with network_info: [{"id": "da930938-8ab4-484a-9207-57babddf0038", "address": "fa:16:3e:5b:23:71", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda930938-8a", "ovs_interfaceid": "da930938-8ab4-484a-9207-57babddf0038", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1795.067572] env[69992]: DEBUG oslo_concurrency.lockutils [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "refresh_cache-2359591e-4149-4594-bcd8-55cb74d1da24" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1795.592327] env[69992]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c3c06a-2b4d-4bfb-af06-0009db2f8626 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.611128] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf3864a-929c-4b87-ad72-f89d1b7c99e2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.617987] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Updating instance '2359591e-4149-4594-bcd8-55cb74d1da24' progress to 83 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1796.124734] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1796.125183] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5122a12f-4ba8-4fdc-bd74-82855ced053f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.134298] env[69992]: DEBUG oslo_vmware.api [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1796.134298] env[69992]: value = "task-2898335" [ 1796.134298] env[69992]: _type = "Task" [ 1796.134298] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.142167] env[69992]: DEBUG oslo_vmware.api [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898335, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.644454] env[69992]: DEBUG oslo_vmware.api [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898335, 'name': PowerOnVM_Task, 'duration_secs': 0.370027} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.644715] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1796.644896] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-70bcc343-b0b6-4388-ac3b-6f976f71779a tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Updating instance '2359591e-4149-4594-bcd8-55cb74d1da24' progress to 100 {{(pid=69992) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1799.565481] env[69992]: DEBUG nova.network.neutron [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Port da930938-8ab4-484a-9207-57babddf0038 binding to destination host cpu-1 is already ACTIVE {{(pid=69992) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1799.565802] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "refresh_cache-2359591e-4149-4594-bcd8-55cb74d1da24" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1799.565925] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "refresh_cache-2359591e-4149-4594-bcd8-55cb74d1da24" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1799.566106] env[69992]: DEBUG nova.network.neutron [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1800.254276] env[69992]: DEBUG nova.network.neutron [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Updating instance_info_cache with network_info: [{"id": "da930938-8ab4-484a-9207-57babddf0038", "address": "fa:16:3e:5b:23:71", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda930938-8a", "ovs_interfaceid": "da930938-8ab4-484a-9207-57babddf0038", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1800.757464] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "refresh_cache-2359591e-4149-4594-bcd8-55cb74d1da24" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1801.260672] env[69992]: DEBUG nova.compute.manager [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=69992) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1801.260912] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1801.261228] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1801.764781] env[69992]: DEBUG nova.objects.instance [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lazy-loading 'migration_context' on Instance uuid 2359591e-4149-4594-bcd8-55cb74d1da24 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1802.312128] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b122972-8bf3-4c70-add1-80ba4fc91ade {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.319676] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb18d37-092a-4870-b562-db2f1e997602 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.349657] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c89b1de-23c0-4046-9c2a-d45e41c304a2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.357069] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-209d20de-70f1-41e0-89c2-b32104451f2d 
{{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.370361] env[69992]: DEBUG nova.compute.provider_tree [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1802.873307] env[69992]: DEBUG nova.scheduler.client.report [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1803.884088] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.623s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1805.421211] env[69992]: INFO nova.compute.manager [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Swapping old allocation on dict_keys(['9dc5dd7f-a3af-48a9-a04e-f6c1d333da28']) held by migration a5ee268c-d62b-43c9-844f-ac3e3235d2d5 for instance [ 1805.442864] env[69992]: DEBUG nova.scheduler.client.report [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Overwriting current allocation {'allocations': {'9dc5dd7f-a3af-48a9-a04e-f6c1d333da28': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 189}}, 'project_id': '122bc9ffa8f54a34af6047517fab0a9a', 'user_id': '99bd7545f7d04aa28e625ce6c5491bb6', 'consumer_generation': 1} on consumer 2359591e-4149-4594-bcd8-55cb74d1da24 {{(pid=69992) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1805.606649] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "refresh_cache-2359591e-4149-4594-bcd8-55cb74d1da24" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1805.606882] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "refresh_cache-2359591e-4149-4594-bcd8-55cb74d1da24" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1805.607089] env[69992]: DEBUG nova.network.neutron [None 
req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1806.302974] env[69992]: DEBUG nova.network.neutron [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Updating instance_info_cache with network_info: [{"id": "da930938-8ab4-484a-9207-57babddf0038", "address": "fa:16:3e:5b:23:71", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda930938-8a", "ovs_interfaceid": "da930938-8ab4-484a-9207-57babddf0038", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1806.806302] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "refresh_cache-2359591e-4149-4594-bcd8-55cb74d1da24" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1806.806779] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1806.807127] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-830c4812-c111-4b84-ba7c-138e874deee2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.814168] env[69992]: DEBUG oslo_vmware.api [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1806.814168] env[69992]: value = "task-2898336" [ 1806.814168] env[69992]: _type = "Task" [ 1806.814168] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.822553] env[69992]: DEBUG oslo_vmware.api [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898336, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.323475] env[69992]: DEBUG oslo_vmware.api [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898336, 'name': PowerOffVM_Task, 'duration_secs': 0.214824} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.323727] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1807.324400] env[69992]: DEBUG nova.virt.hardware [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1807.324634] env[69992]: DEBUG nova.virt.hardware [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1807.324798] env[69992]: DEBUG nova.virt.hardware [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1807.324983] env[69992]: DEBUG nova.virt.hardware [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1807.325146] env[69992]: DEBUG nova.virt.hardware [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1807.325297] env[69992]: DEBUG nova.virt.hardware [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1807.325532] env[69992]: DEBUG nova.virt.hardware [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1807.325724] env[69992]: DEBUG nova.virt.hardware [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1807.325898] env[69992]: DEBUG nova.virt.hardware [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1807.326073] env[69992]: DEBUG nova.virt.hardware [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1807.326251] env[69992]: DEBUG nova.virt.hardware [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1807.331071] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d80bf5b-2e7c-4414-a33a-dca17c51abc4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.346066] env[69992]: DEBUG oslo_vmware.api [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1807.346066] env[69992]: value = "task-2898337" [ 1807.346066] env[69992]: _type = "Task" [ 1807.346066] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.353257] env[69992]: DEBUG oslo_vmware.api [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898337, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.857102] env[69992]: DEBUG oslo_vmware.api [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898337, 'name': ReconfigVM_Task, 'duration_secs': 0.135803} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.857949] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49af7311-e9cf-4a71-aa00-3eed8c0ca90a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.875836] env[69992]: DEBUG nova.virt.hardware [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1807.876082] env[69992]: DEBUG nova.virt.hardware [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1807.876244] env[69992]: DEBUG nova.virt.hardware [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1807.876427] env[69992]: DEBUG nova.virt.hardware [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1807.876573] env[69992]: DEBUG nova.virt.hardware [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1807.876742] env[69992]: DEBUG nova.virt.hardware [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1807.877022] env[69992]: DEBUG nova.virt.hardware [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1807.877218] env[69992]: DEBUG nova.virt.hardware [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1807.877369] env[69992]: DEBUG nova.virt.hardware [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1807.877534] env[69992]: DEBUG nova.virt.hardware [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1807.877706] env[69992]: DEBUG nova.virt.hardware [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1807.878494] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae0f3ff3-4ee4-4e31-bbb4-62d6f5f680d7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.883571] env[69992]: DEBUG oslo_vmware.api [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1807.883571] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5263673e-c24f-b654-d3d4-db8a7a0efbfa" [ 1807.883571] env[69992]: _type = "Task" [ 1807.883571] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.891070] env[69992]: DEBUG oslo_vmware.api [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5263673e-c24f-b654-d3d4-db8a7a0efbfa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.394068] env[69992]: DEBUG oslo_vmware.api [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5263673e-c24f-b654-d3d4-db8a7a0efbfa, 'name': SearchDatastore_Task, 'duration_secs': 0.008419} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.399610] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Reconfiguring VM instance instance-0000007d to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1808.399896] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4464fde8-ec7b-4836-813f-dee47987c736 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.417212] env[69992]: DEBUG oslo_vmware.api [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1808.417212] env[69992]: value = "task-2898338" [ 1808.417212] env[69992]: _type = "Task" [ 1808.417212] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.424412] env[69992]: DEBUG oslo_vmware.api [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898338, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.926647] env[69992]: DEBUG oslo_vmware.api [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898338, 'name': ReconfigVM_Task, 'duration_secs': 0.192698} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.926990] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Reconfigured VM instance instance-0000007d to detach disk 2000 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1808.927668] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828fa6d8-8d59-4e48-a23b-e2e8677b4e12 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.949802] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 2359591e-4149-4594-bcd8-55cb74d1da24/2359591e-4149-4594-bcd8-55cb74d1da24.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1808.950045] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c03c8157-21b2-417c-982e-b71600798c8b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.968502] env[69992]: DEBUG oslo_vmware.api [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1808.968502] env[69992]: value = "task-2898339" [ 1808.968502] env[69992]: _type = "Task" [ 1808.968502] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.976291] env[69992]: DEBUG oslo_vmware.api [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898339, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.478524] env[69992]: DEBUG oslo_vmware.api [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898339, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.979019] env[69992]: DEBUG oslo_vmware.api [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898339, 'name': ReconfigVM_Task, 'duration_secs': 0.821269} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.979398] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 2359591e-4149-4594-bcd8-55cb74d1da24/2359591e-4149-4594-bcd8-55cb74d1da24.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1809.980081] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8a5d7e-24b9-4b3a-a7bc-9072fc8e698b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.997558] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5662a14d-ce27-4eb9-bcb9-854758ca6c76 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.015336] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-372c6999-7b24-4f72-8e4d-c5f8350cdd8b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.033573] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1acf02ce-f25c-439c-bf66-85f53199af8f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.040228] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1810.040445] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cb82bb12-056e-46f9-b1e8-7cb61103cda7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.046341] env[69992]: DEBUG oslo_vmware.api [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1810.046341] env[69992]: value = "task-2898340" [ 1810.046341] env[69992]: _type = "Task" [ 1810.046341] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.053435] env[69992]: DEBUG oslo_vmware.api [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898340, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.557347] env[69992]: DEBUG oslo_vmware.api [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898340, 'name': PowerOnVM_Task, 'duration_secs': 0.394805} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.557610] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1811.568389] env[69992]: INFO nova.compute.manager [None req-ea440076-2950-4164-b7d0-38c74e4c9bb1 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Updating instance to original state: 'active' [ 1813.166856] env[69992]: DEBUG oslo_concurrency.lockutils [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "2359591e-4149-4594-bcd8-55cb74d1da24" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1813.167261] env[69992]: DEBUG oslo_concurrency.lockutils [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "2359591e-4149-4594-bcd8-55cb74d1da24" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1813.167385] env[69992]: DEBUG oslo_concurrency.lockutils [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "2359591e-4149-4594-bcd8-55cb74d1da24-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1813.167574] env[69992]: DEBUG oslo_concurrency.lockutils [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "2359591e-4149-4594-bcd8-55cb74d1da24-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1813.167749] env[69992]: DEBUG oslo_concurrency.lockutils [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "2359591e-4149-4594-bcd8-55cb74d1da24-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1813.169852] env[69992]: INFO nova.compute.manager [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Terminating instance [ 1813.673341] env[69992]: DEBUG nova.compute.manager [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 
2359591e-4149-4594-bcd8-55cb74d1da24] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1813.673561] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1813.674601] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8549d223-cc80-4ebf-b958-7fe8e9910ced {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.682546] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1813.682763] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8bc6261c-2030-47d7-aed4-54f3d21911a8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.688258] env[69992]: DEBUG oslo_vmware.api [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1813.688258] env[69992]: value = "task-2898341" [ 1813.688258] env[69992]: _type = "Task" [ 1813.688258] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.695907] env[69992]: DEBUG oslo_vmware.api [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898341, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.198371] env[69992]: DEBUG oslo_vmware.api [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898341, 'name': PowerOffVM_Task, 'duration_secs': 0.180316} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.198687] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1814.198740] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1814.199054] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c962484-f1d5-4f70-9f46-c3cacf32893d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.262708] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1814.262911] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1814.263111] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Deleting the datastore file [datastore1] 2359591e-4149-4594-bcd8-55cb74d1da24 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1814.263368] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-208c25e1-6a67-41ef-8e65-be27aee1492c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.270307] env[69992]: DEBUG oslo_vmware.api [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1814.270307] env[69992]: value = "task-2898343" [ 1814.270307] env[69992]: _type = "Task" [ 1814.270307] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.277834] env[69992]: DEBUG oslo_vmware.api [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898343, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.779969] env[69992]: DEBUG oslo_vmware.api [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898343, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141083} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.780230] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1814.780415] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1814.780589] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1814.780760] env[69992]: INFO nova.compute.manager [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1814.781017] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1814.781217] env[69992]: DEBUG nova.compute.manager [-] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1814.781311] env[69992]: DEBUG nova.network.neutron [-] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1815.295366] env[69992]: DEBUG nova.compute.manager [req-bd5b4f83-898e-403c-bc98-21dfaffc0e26 req-8487bf20-1b56-48b7-a7a8-cbaee70cbea8 service nova] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Received event network-vif-deleted-da930938-8ab4-484a-9207-57babddf0038 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1815.295603] env[69992]: INFO nova.compute.manager [req-bd5b4f83-898e-403c-bc98-21dfaffc0e26 req-8487bf20-1b56-48b7-a7a8-cbaee70cbea8 service nova] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Neutron deleted interface da930938-8ab4-484a-9207-57babddf0038; detaching it from the instance and deleting it from the info cache [ 1815.295726] env[69992]: DEBUG nova.network.neutron [req-bd5b4f83-898e-403c-bc98-21dfaffc0e26 req-8487bf20-1b56-48b7-a7a8-cbaee70cbea8 service nova] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1815.700049] env[69992]: DEBUG nova.network.neutron [-] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1815.797851] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-72cabbf2-654b-4bfa-9a7f-7b3fbb5c0fec {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.808479] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b53be07-efa7-4741-854b-f5254fa07806 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.833074] env[69992]: DEBUG nova.compute.manager [req-bd5b4f83-898e-403c-bc98-21dfaffc0e26 req-8487bf20-1b56-48b7-a7a8-cbaee70cbea8 service nova] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Detach interface failed, port_id=da930938-8ab4-484a-9207-57babddf0038, reason: Instance 2359591e-4149-4594-bcd8-55cb74d1da24 could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1816.203171] env[69992]: INFO nova.compute.manager [-] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Took 1.42 seconds to deallocate network for instance. 
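The revert and teardown sequence above (PowerOffVM_Task, the two ReconfigVM_Task calls that detach disk 2000 and re-attach the original vmdk, PowerOnVM_Task, and later DeleteDatastoreFile_Task) all follow the same invoke-and-poll pattern: the vim call returns a Task managed-object reference and the session polls it until completion, which is what produces the paired "Waiting for the task", "progress is N%", and "completed successfully" entries. A minimal sketch of that pattern using the public oslo.vmware API follows; the helper name and the placeholder connection values are illustrative, not taken from this deployment.

    from oslo_vmware import api

    def power_cycle(session, vm_ref):
        # PowerOffVM_Task, ReconfigVM_Task and PowerOnVM_Task each return a
        # Task managed-object reference; wait_for_task() polls it (the
        # "progress is N%" entries above) and raises if vCenter reports the
        # task as failed.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)
        # ...the ReconfigVM_Task calls that swap the disks back would sit
        # here, built from a VirtualMachineConfigSpec...
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        session.wait_for_task(task)

    if __name__ == '__main__':
        # Placeholder connection values; task_poll_interval controls how often
        # the task is re-read, matching the roughly half-second poll cadence
        # visible in the timestamps above.
        session = api.VMwareAPISession(
            'vcenter.example.test', 'user', 'password',
            api_retry_count=10, task_poll_interval=0.5)
        # vm_ref would be a VirtualMachine moref obtained beforehand, e.g. via
        # a PropertyCollector or SearchIndex lookup as seen in the log.
        # power_cycle(session, vm_ref)

UnregisterVM, by contrast, is a plain synchronous vim call, which is why it appears above without a matching task-poll entry.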
[ 1816.609250] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1816.709705] env[69992]: DEBUG oslo_concurrency.lockutils [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1816.710018] env[69992]: DEBUG oslo_concurrency.lockutils [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1816.710221] env[69992]: DEBUG oslo_concurrency.lockutils [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1816.727726] env[69992]: INFO nova.scheduler.client.report [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Deleted allocations for instance 2359591e-4149-4594-bcd8-55cb74d1da24 [ 1817.234659] env[69992]: DEBUG oslo_concurrency.lockutils [None req-14d68511-4c31-4506-ac26-d33cfe8797e5 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "2359591e-4149-4594-bcd8-55cb74d1da24" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.067s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1817.604935] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1817.609488] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1818.069897] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "e488d413-653b-4730-b7d9-1db8320e9c6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1818.070133] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 
tempest-ServerActionsTestJSON-1992076937-project-member] Lock "e488d413-653b-4730-b7d9-1db8320e9c6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1818.572420] env[69992]: DEBUG nova.compute.manager [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Starting instance... {{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1818.608906] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1818.609127] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1818.609297] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1819.090124] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1819.090456] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1819.092146] env[69992]: INFO nova.compute.claims [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1819.111883] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1820.125921] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca7b46d-9bc8-4078-89ae-ff601ac8e408 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.134019] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f69cae-218e-460f-aa0f-86b80c627ba2 {{(pid=69992) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.164169] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc1d2c2-1f16-4b4b-9a3d-ddca160692e7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.170938] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23a344ed-511f-49c7-b8d6-673b179d0f80 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.184047] env[69992]: DEBUG nova.compute.provider_tree [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1820.686598] env[69992]: DEBUG nova.scheduler.client.report [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1821.191346] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.101s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1821.191864] env[69992]: DEBUG nova.compute.manager [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Start building networks asynchronously for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1821.194570] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.083s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1821.194667] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1821.194778] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69992) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1821.195845] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d73be75-b428-4669-af68-1acba8dac673 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.203929] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269bd46b-68e7-4933-94c6-69feafc6bb3c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.217554] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44df8d4e-2a47-4c99-854b-233c986fb724 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.224008] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1177f088-6fb7-42e5-b6e2-fd09749b678a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.253582] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180941MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=69992) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1821.253732] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1821.253935] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1821.697637] env[69992]: DEBUG nova.compute.utils [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:239}} [ 1821.699444] env[69992]: DEBUG nova.compute.manager [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1821.699692] env[69992]: DEBUG nova.network.neutron [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1821.745380] env[69992]: DEBUG nova.policy [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '99bd7545f7d04aa28e625ce6c5491bb6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '122bc9ffa8f54a34af6047517fab0a9a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1822.022287] env[69992]: DEBUG nova.network.neutron [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Successfully created port: 89c8fa29-faa7-447f-8ad6-5ebcd21e1c77 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1822.203286] env[69992]: DEBUG nova.compute.manager [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1822.272591] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance e488d413-653b-4730-b7d9-1db8320e9c6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1822.272798] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1822.272957] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1822.296602] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b34cf7a4-f346-467a-8c4c-5494b66c012f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.304476] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25b79324-cb39-4f84-87c1-bc8535f3c5ab {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.335065] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e658ab85-5ff4-4558-ae54-984905b2dbf7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.341629] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b677ff9a-5317-40c2-bf2b-ac7d5ad8c28c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.354355] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1822.857141] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1823.215158] env[69992]: DEBUG nova.compute.manager [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1823.245343] env[69992]: DEBUG nova.virt.hardware [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1823.245584] env[69992]: DEBUG nova.virt.hardware [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1823.245740] env[69992]: DEBUG nova.virt.hardware [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1823.245921] env[69992]: DEBUG nova.virt.hardware [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1823.246082] env[69992]: DEBUG nova.virt.hardware [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1823.246236] env[69992]: DEBUG nova.virt.hardware [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1823.246441] env[69992]: DEBUG nova.virt.hardware [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1823.246595] env[69992]: DEBUG nova.virt.hardware [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1823.246757] env[69992]: DEBUG nova.virt.hardware [None 
req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1823.246918] env[69992]: DEBUG nova.virt.hardware [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1823.247102] env[69992]: DEBUG nova.virt.hardware [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1823.248035] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c80a4d1-bfbf-4cb6-929a-208c44537adc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.256025] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0098c7fe-724b-4097-a6b7-6ae625ab158d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.362092] env[69992]: DEBUG nova.compute.manager [req-c951bab5-6185-4ab0-8e06-308670e17b1d req-b9fcbff9-003b-48e0-a6e1-d7f795613728 service nova] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Received event network-vif-plugged-89c8fa29-faa7-447f-8ad6-5ebcd21e1c77 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1823.362323] env[69992]: DEBUG oslo_concurrency.lockutils [req-c951bab5-6185-4ab0-8e06-308670e17b1d req-b9fcbff9-003b-48e0-a6e1-d7f795613728 service nova] Acquiring lock "e488d413-653b-4730-b7d9-1db8320e9c6b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1823.362525] env[69992]: DEBUG oslo_concurrency.lockutils [req-c951bab5-6185-4ab0-8e06-308670e17b1d req-b9fcbff9-003b-48e0-a6e1-d7f795613728 service nova] Lock "e488d413-653b-4730-b7d9-1db8320e9c6b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1823.362815] env[69992]: DEBUG oslo_concurrency.lockutils [req-c951bab5-6185-4ab0-8e06-308670e17b1d req-b9fcbff9-003b-48e0-a6e1-d7f795613728 service nova] Lock "e488d413-653b-4730-b7d9-1db8320e9c6b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1823.362881] env[69992]: DEBUG nova.compute.manager [req-c951bab5-6185-4ab0-8e06-308670e17b1d req-b9fcbff9-003b-48e0-a6e1-d7f795613728 service nova] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] No waiting events found dispatching network-vif-plugged-89c8fa29-faa7-447f-8ad6-5ebcd21e1c77 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1823.363087] env[69992]: WARNING nova.compute.manager 
[req-c951bab5-6185-4ab0-8e06-308670e17b1d req-b9fcbff9-003b-48e0-a6e1-d7f795613728 service nova] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Received unexpected event network-vif-plugged-89c8fa29-faa7-447f-8ad6-5ebcd21e1c77 for instance with vm_state building and task_state spawning. [ 1823.370315] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69992) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1823.370481] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.117s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1823.440242] env[69992]: DEBUG nova.network.neutron [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Successfully updated port: 89c8fa29-faa7-447f-8ad6-5ebcd21e1c77 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1823.946154] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "refresh_cache-e488d413-653b-4730-b7d9-1db8320e9c6b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1823.946316] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "refresh_cache-e488d413-653b-4730-b7d9-1db8320e9c6b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1823.946471] env[69992]: DEBUG nova.network.neutron [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1824.480035] env[69992]: DEBUG nova.network.neutron [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1824.592756] env[69992]: DEBUG nova.network.neutron [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Updating instance_info_cache with network_info: [{"id": "89c8fa29-faa7-447f-8ad6-5ebcd21e1c77", "address": "fa:16:3e:9d:fd:c1", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89c8fa29-fa", "ovs_interfaceid": "89c8fa29-faa7-447f-8ad6-5ebcd21e1c77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1825.095677] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "refresh_cache-e488d413-653b-4730-b7d9-1db8320e9c6b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1825.096115] env[69992]: DEBUG nova.compute.manager [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Instance network_info: |[{"id": "89c8fa29-faa7-447f-8ad6-5ebcd21e1c77", "address": "fa:16:3e:9d:fd:c1", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89c8fa29-fa", "ovs_interfaceid": "89c8fa29-faa7-447f-8ad6-5ebcd21e1c77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1825.096640] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:fd:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e1049e8-c06b-4c93-a9e1-2cbb530f3f95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89c8fa29-faa7-447f-8ad6-5ebcd21e1c77', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1825.104255] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1825.104453] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1825.104665] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a8fb8ea1-8add-41f5-ad71-e1f0c79b2d2b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.126017] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1825.126017] env[69992]: value = "task-2898344" [ 1825.126017] env[69992]: _type = "Task" [ 1825.126017] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.134443] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898344, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.371384] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1825.371569] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1825.371722] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69992) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1825.390090] env[69992]: DEBUG nova.compute.manager [req-ab04eda3-b335-4f98-a72b-689c4caf16bd req-3464668e-068e-426e-96aa-162f68a12fa9 service nova] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Received event network-changed-89c8fa29-faa7-447f-8ad6-5ebcd21e1c77 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1825.390243] env[69992]: DEBUG nova.compute.manager [req-ab04eda3-b335-4f98-a72b-689c4caf16bd req-3464668e-068e-426e-96aa-162f68a12fa9 service nova] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Refreshing instance network info cache due to event network-changed-89c8fa29-faa7-447f-8ad6-5ebcd21e1c77. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1825.390414] env[69992]: DEBUG oslo_concurrency.lockutils [req-ab04eda3-b335-4f98-a72b-689c4caf16bd req-3464668e-068e-426e-96aa-162f68a12fa9 service nova] Acquiring lock "refresh_cache-e488d413-653b-4730-b7d9-1db8320e9c6b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1825.390559] env[69992]: DEBUG oslo_concurrency.lockutils [req-ab04eda3-b335-4f98-a72b-689c4caf16bd req-3464668e-068e-426e-96aa-162f68a12fa9 service nova] Acquired lock "refresh_cache-e488d413-653b-4730-b7d9-1db8320e9c6b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1825.390718] env[69992]: DEBUG nova.network.neutron [req-ab04eda3-b335-4f98-a72b-689c4caf16bd req-3464668e-068e-426e-96aa-162f68a12fa9 service nova] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Refreshing network info cache for port 89c8fa29-faa7-447f-8ad6-5ebcd21e1c77 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1825.635580] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898344, 'name': CreateVM_Task, 'duration_secs': 0.300402} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.635966] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1825.636407] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1825.636563] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1825.636900] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1825.637160] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48c8b145-257a-4efb-8f03-3bdb60e7b8e8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.642294] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1825.642294] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5290d467-7f10-452d-9349-634111ea4eb4" [ 1825.642294] env[69992]: _type = "Task" [ 1825.642294] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.649657] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5290d467-7f10-452d-9349-634111ea4eb4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.064460] env[69992]: DEBUG nova.network.neutron [req-ab04eda3-b335-4f98-a72b-689c4caf16bd req-3464668e-068e-426e-96aa-162f68a12fa9 service nova] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Updated VIF entry in instance network info cache for port 89c8fa29-faa7-447f-8ad6-5ebcd21e1c77. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1826.064823] env[69992]: DEBUG nova.network.neutron [req-ab04eda3-b335-4f98-a72b-689c4caf16bd req-3464668e-068e-426e-96aa-162f68a12fa9 service nova] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Updating instance_info_cache with network_info: [{"id": "89c8fa29-faa7-447f-8ad6-5ebcd21e1c77", "address": "fa:16:3e:9d:fd:c1", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89c8fa29-fa", "ovs_interfaceid": "89c8fa29-faa7-447f-8ad6-5ebcd21e1c77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1826.152401] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5290d467-7f10-452d-9349-634111ea4eb4, 'name': SearchDatastore_Task, 'duration_secs': 0.012985} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.152682] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1826.152905] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1826.153185] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.153345] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1826.153526] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1826.153769] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-31d3f672-163e-4ee9-bb5e-d10034c8ec2a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.161616] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1826.161783] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1826.162513] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11e433da-39d0-408a-a7c5-6b4889aea156 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.167282] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1826.167282] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]524fbf43-bb45-88c9-9471-c87e2ab415bc" [ 1826.167282] env[69992]: _type = "Task" [ 1826.167282] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.174447] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524fbf43-bb45-88c9-9471-c87e2ab415bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.567874] env[69992]: DEBUG oslo_concurrency.lockutils [req-ab04eda3-b335-4f98-a72b-689c4caf16bd req-3464668e-068e-426e-96aa-162f68a12fa9 service nova] Releasing lock "refresh_cache-e488d413-653b-4730-b7d9-1db8320e9c6b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1826.677248] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]524fbf43-bb45-88c9-9471-c87e2ab415bc, 'name': SearchDatastore_Task, 'duration_secs': 0.010796} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.677975] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd4d305d-1f42-4c78-90a6-fae91cc8969e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.682443] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1826.682443] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]521e84b5-19d6-9c8b-57e8-87250a6a9d36" [ 1826.682443] env[69992]: _type = "Task" [ 1826.682443] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.689340] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521e84b5-19d6-9c8b-57e8-87250a6a9d36, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.192057] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]521e84b5-19d6-9c8b-57e8-87250a6a9d36, 'name': SearchDatastore_Task, 'duration_secs': 0.009475} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.192332] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1827.192613] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] e488d413-653b-4730-b7d9-1db8320e9c6b/e488d413-653b-4730-b7d9-1db8320e9c6b.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1827.192860] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-721d1aed-8b36-4bef-b9cc-f7a729c1ed41 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.200661] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1827.200661] env[69992]: value = "task-2898345" [ 1827.200661] env[69992]: _type = "Task" [ 1827.200661] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.208783] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898345, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.710135] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898345, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.211439] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898345, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.711741] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898345, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.441154} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.712127] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] e488d413-653b-4730-b7d9-1db8320e9c6b/e488d413-653b-4730-b7d9-1db8320e9c6b.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1828.712253] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1828.712442] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3fe35c97-febc-4130-b521-b89c2e53ba73 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.717674] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1828.717674] env[69992]: value = "task-2898346" [ 1828.717674] env[69992]: _type = "Task" [ 1828.717674] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.725432] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898346, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.227764] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898346, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061902} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.228041] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1829.228766] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a84c63-a747-4163-9c83-c89462999fda {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.249933] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Reconfiguring VM instance instance-0000007e to attach disk [datastore1] e488d413-653b-4730-b7d9-1db8320e9c6b/e488d413-653b-4730-b7d9-1db8320e9c6b.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1829.250156] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b885c464-f74a-4ddf-be1d-f7f17b09a630 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.268539] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1829.268539] env[69992]: value = "task-2898347" [ 1829.268539] env[69992]: _type = "Task" [ 1829.268539] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.275896] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898347, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.778668] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898347, 'name': ReconfigVM_Task, 'duration_secs': 0.252757} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.779093] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Reconfigured VM instance instance-0000007e to attach disk [datastore1] e488d413-653b-4730-b7d9-1db8320e9c6b/e488d413-653b-4730-b7d9-1db8320e9c6b.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1829.779654] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-882bb787-7e95-4291-8067-533fa2066225 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.785903] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1829.785903] env[69992]: value = "task-2898348" [ 1829.785903] env[69992]: _type = "Task" [ 1829.785903] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.793156] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898348, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.295207] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898348, 'name': Rename_Task, 'duration_secs': 0.13043} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.295492] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1830.295740] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-11d57eb1-5344-4369-870f-2606fc046465 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.302140] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1830.302140] env[69992]: value = "task-2898349" [ 1830.302140] env[69992]: _type = "Task" [ 1830.302140] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.309133] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898349, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.812745] env[69992]: DEBUG oslo_vmware.api [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898349, 'name': PowerOnVM_Task, 'duration_secs': 0.421875} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.813133] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1830.813211] env[69992]: INFO nova.compute.manager [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Took 7.60 seconds to spawn the instance on the hypervisor. [ 1830.813387] env[69992]: DEBUG nova.compute.manager [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1830.814150] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa84c32-7bbf-4893-a212-77ab44b1ab79 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.332029] env[69992]: INFO nova.compute.manager [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Took 12.25 seconds to build instance. [ 1831.833540] env[69992]: DEBUG oslo_concurrency.lockutils [None req-9389b9b0-ee34-4f4d-a86c-ab27aafa53ce tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "e488d413-653b-4730-b7d9-1db8320e9c6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.763s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1832.092930] env[69992]: DEBUG nova.compute.manager [req-ab9f1341-10d5-4f27-8e5d-a4b5c24b4ec7 req-0766b803-0b7d-4dbd-a01a-a2c9551c7d87 service nova] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Received event network-changed-89c8fa29-faa7-447f-8ad6-5ebcd21e1c77 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1832.093147] env[69992]: DEBUG nova.compute.manager [req-ab9f1341-10d5-4f27-8e5d-a4b5c24b4ec7 req-0766b803-0b7d-4dbd-a01a-a2c9551c7d87 service nova] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Refreshing instance network info cache due to event network-changed-89c8fa29-faa7-447f-8ad6-5ebcd21e1c77. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1832.093418] env[69992]: DEBUG oslo_concurrency.lockutils [req-ab9f1341-10d5-4f27-8e5d-a4b5c24b4ec7 req-0766b803-0b7d-4dbd-a01a-a2c9551c7d87 service nova] Acquiring lock "refresh_cache-e488d413-653b-4730-b7d9-1db8320e9c6b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1832.093578] env[69992]: DEBUG oslo_concurrency.lockutils [req-ab9f1341-10d5-4f27-8e5d-a4b5c24b4ec7 req-0766b803-0b7d-4dbd-a01a-a2c9551c7d87 service nova] Acquired lock "refresh_cache-e488d413-653b-4730-b7d9-1db8320e9c6b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1832.093745] env[69992]: DEBUG nova.network.neutron [req-ab9f1341-10d5-4f27-8e5d-a4b5c24b4ec7 req-0766b803-0b7d-4dbd-a01a-a2c9551c7d87 service nova] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Refreshing network info cache for port 89c8fa29-faa7-447f-8ad6-5ebcd21e1c77 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1832.800254] env[69992]: DEBUG nova.network.neutron [req-ab9f1341-10d5-4f27-8e5d-a4b5c24b4ec7 req-0766b803-0b7d-4dbd-a01a-a2c9551c7d87 service nova] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Updated VIF entry in instance network info cache for port 89c8fa29-faa7-447f-8ad6-5ebcd21e1c77. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1832.800623] env[69992]: DEBUG nova.network.neutron [req-ab9f1341-10d5-4f27-8e5d-a4b5c24b4ec7 req-0766b803-0b7d-4dbd-a01a-a2c9551c7d87 service nova] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Updating instance_info_cache with network_info: [{"id": "89c8fa29-faa7-447f-8ad6-5ebcd21e1c77", "address": "fa:16:3e:9d:fd:c1", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89c8fa29-fa", "ovs_interfaceid": "89c8fa29-faa7-447f-8ad6-5ebcd21e1c77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1833.303978] env[69992]: DEBUG oslo_concurrency.lockutils [req-ab9f1341-10d5-4f27-8e5d-a4b5c24b4ec7 req-0766b803-0b7d-4dbd-a01a-a2c9551c7d87 service nova] Releasing lock "refresh_cache-e488d413-653b-4730-b7d9-1db8320e9c6b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1868.659052] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fbf122b9-d142-4860-8c0a-dee1c4d6bd8b 
tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "e488d413-653b-4730-b7d9-1db8320e9c6b" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1868.659052] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fbf122b9-d142-4860-8c0a-dee1c4d6bd8b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "e488d413-653b-4730-b7d9-1db8320e9c6b" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1868.659052] env[69992]: DEBUG nova.compute.manager [None req-fbf122b9-d142-4860-8c0a-dee1c4d6bd8b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1868.659920] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2439d3c-7d04-460a-bb9a-14c5fe966670 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.666715] env[69992]: DEBUG nova.compute.manager [None req-fbf122b9-d142-4860-8c0a-dee1c4d6bd8b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69992) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1868.667273] env[69992]: DEBUG nova.objects.instance [None req-fbf122b9-d142-4860-8c0a-dee1c4d6bd8b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lazy-loading 'flavor' on Instance uuid e488d413-653b-4730-b7d9-1db8320e9c6b {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1869.676849] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbf122b9-d142-4860-8c0a-dee1c4d6bd8b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1869.677370] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d8b4ec6-8e98-4109-86fd-30d23bbe889d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.684823] env[69992]: DEBUG oslo_vmware.api [None req-fbf122b9-d142-4860-8c0a-dee1c4d6bd8b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1869.684823] env[69992]: value = "task-2898350" [ 1869.684823] env[69992]: _type = "Task" [ 1869.684823] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.692603] env[69992]: DEBUG oslo_vmware.api [None req-fbf122b9-d142-4860-8c0a-dee1c4d6bd8b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898350, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.194547] env[69992]: DEBUG oslo_vmware.api [None req-fbf122b9-d142-4860-8c0a-dee1c4d6bd8b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898350, 'name': PowerOffVM_Task, 'duration_secs': 0.18288} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.194828] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbf122b9-d142-4860-8c0a-dee1c4d6bd8b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1870.195052] env[69992]: DEBUG nova.compute.manager [None req-fbf122b9-d142-4860-8c0a-dee1c4d6bd8b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1870.195789] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd0deb1-b825-447b-bd72-bc42b1e96740 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.706835] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fbf122b9-d142-4860-8c0a-dee1c4d6bd8b tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "e488d413-653b-4730-b7d9-1db8320e9c6b" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.048s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1870.998814] env[69992]: DEBUG nova.objects.instance [None req-0085d4b6-fbed-4b7b-a674-26e952f9520e tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lazy-loading 'flavor' on Instance uuid e488d413-653b-4730-b7d9-1db8320e9c6b {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1871.505303] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0085d4b6-fbed-4b7b-a674-26e952f9520e tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "refresh_cache-e488d413-653b-4730-b7d9-1db8320e9c6b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1871.505517] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0085d4b6-fbed-4b7b-a674-26e952f9520e tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "refresh_cache-e488d413-653b-4730-b7d9-1db8320e9c6b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1871.505691] env[69992]: DEBUG nova.network.neutron [None req-0085d4b6-fbed-4b7b-a674-26e952f9520e tempest-ServerActionsTestJSON-1992076937 
tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1871.505799] env[69992]: DEBUG nova.objects.instance [None req-0085d4b6-fbed-4b7b-a674-26e952f9520e tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lazy-loading 'info_cache' on Instance uuid e488d413-653b-4730-b7d9-1db8320e9c6b {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1872.010122] env[69992]: DEBUG nova.objects.base [None req-0085d4b6-fbed-4b7b-a674-26e952f9520e tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1872.703923] env[69992]: DEBUG nova.network.neutron [None req-0085d4b6-fbed-4b7b-a674-26e952f9520e tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Updating instance_info_cache with network_info: [{"id": "89c8fa29-faa7-447f-8ad6-5ebcd21e1c77", "address": "fa:16:3e:9d:fd:c1", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89c8fa29-fa", "ovs_interfaceid": "89c8fa29-faa7-447f-8ad6-5ebcd21e1c77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1873.206958] env[69992]: DEBUG oslo_concurrency.lockutils [None req-0085d4b6-fbed-4b7b-a674-26e952f9520e tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "refresh_cache-e488d413-653b-4730-b7d9-1db8320e9c6b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1874.213101] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0085d4b6-fbed-4b7b-a674-26e952f9520e tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1874.213434] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7d5070cb-ee42-4bfe-9c7c-66cf4b1023f4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1874.220586] env[69992]: DEBUG oslo_vmware.api [None req-0085d4b6-fbed-4b7b-a674-26e952f9520e tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1874.220586] env[69992]: value = "task-2898351" [ 1874.220586] env[69992]: _type = "Task" [ 1874.220586] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.228263] env[69992]: DEBUG oslo_vmware.api [None req-0085d4b6-fbed-4b7b-a674-26e952f9520e tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898351, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.730922] env[69992]: DEBUG oslo_vmware.api [None req-0085d4b6-fbed-4b7b-a674-26e952f9520e tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898351, 'name': PowerOnVM_Task, 'duration_secs': 0.372055} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.731224] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-0085d4b6-fbed-4b7b-a674-26e952f9520e tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1874.731429] env[69992]: DEBUG nova.compute.manager [None req-0085d4b6-fbed-4b7b-a674-26e952f9520e tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1874.732186] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b14e8ba1-abe1-4c05-bc76-9d3a5a2bfa34 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.698815] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c41d215d-1f95-4d70-8308-7bb55fc3eca1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.705418] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-42375ba2-6fc8-4368-8aa2-15cbec898ad9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Suspending the VM {{(pid=69992) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1875.705642] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-6cc2120e-33c7-48a0-8252-51ebedb9e707 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.712706] env[69992]: DEBUG oslo_vmware.api [None req-42375ba2-6fc8-4368-8aa2-15cbec898ad9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1875.712706] env[69992]: value = "task-2898352" [ 1875.712706] env[69992]: _type = "Task" [ 1875.712706] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1875.720879] env[69992]: DEBUG oslo_vmware.api [None req-42375ba2-6fc8-4368-8aa2-15cbec898ad9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898352, 'name': SuspendVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.223736] env[69992]: DEBUG oslo_vmware.api [None req-42375ba2-6fc8-4368-8aa2-15cbec898ad9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898352, 'name': SuspendVM_Task} progress is 75%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.610302] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1876.723834] env[69992]: DEBUG oslo_vmware.api [None req-42375ba2-6fc8-4368-8aa2-15cbec898ad9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898352, 'name': SuspendVM_Task, 'duration_secs': 0.637788} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1876.724157] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-42375ba2-6fc8-4368-8aa2-15cbec898ad9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Suspended the VM {{(pid=69992) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1876.724307] env[69992]: DEBUG nova.compute.manager [None req-42375ba2-6fc8-4368-8aa2-15cbec898ad9 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1876.724996] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c10cee22-367f-4996-bed4-9de19cc3d5a9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.026784] env[69992]: INFO nova.compute.manager [None req-52dae640-9f6f-4b07-b59a-5740e0181f86 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Resuming [ 1878.028047] env[69992]: DEBUG nova.objects.instance [None req-52dae640-9f6f-4b07-b59a-5740e0181f86 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lazy-loading 'flavor' on Instance uuid e488d413-653b-4730-b7d9-1db8320e9c6b {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1878.604773] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1878.609670] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task 
ComputeManager._poll_unconfirmed_resizes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1878.609890] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1878.610099] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1879.113107] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1879.113465] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1879.113535] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1879.113687] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69992) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1879.114592] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33a7971-e899-4f2c-afed-afa257509399 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.122812] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d11daab-eeb8-4626-a21c-3e1f90a269fc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.135867] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8049c9f9-359c-43af-9321-b05c46476674 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.141552] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-229e242d-9cdb-4b91-a7b8-3364cc313be8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.169715] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180901MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=69992) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1879.169833] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1879.170039] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1879.537792] env[69992]: DEBUG oslo_concurrency.lockutils [None req-52dae640-9f6f-4b07-b59a-5740e0181f86 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "refresh_cache-e488d413-653b-4730-b7d9-1db8320e9c6b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1879.537985] env[69992]: DEBUG oslo_concurrency.lockutils [None req-52dae640-9f6f-4b07-b59a-5740e0181f86 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquired lock "refresh_cache-e488d413-653b-4730-b7d9-1db8320e9c6b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1879.538187] env[69992]: DEBUG nova.network.neutron [None req-52dae640-9f6f-4b07-b59a-5740e0181f86 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1880.194715] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance e488d413-653b-4730-b7d9-1db8320e9c6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1880.194715] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1880.194715] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1880.220576] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f584cc9-2506-495c-b5a4-107c510d404f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.228187] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f4bda18-489a-49c2-baf7-fd535cfc2b96 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.258410] env[69992]: DEBUG nova.network.neutron [None req-52dae640-9f6f-4b07-b59a-5740e0181f86 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Updating instance_info_cache with network_info: [{"id": "89c8fa29-faa7-447f-8ad6-5ebcd21e1c77", "address": "fa:16:3e:9d:fd:c1", "network": {"id": "826ed58d-8ddf-4cf4-9252-c22d4c4c757c", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1452656855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "122bc9ffa8f54a34af6047517fab0a9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89c8fa29-fa", "ovs_interfaceid": "89c8fa29-faa7-447f-8ad6-5ebcd21e1c77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1880.259945] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aea05cc-8328-45e6-89ea-08a7adae4ac9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.266774] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6241e037-f9f9-49a3-8a00-0b9731abb84f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.280336] env[69992]: DEBUG 
nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1880.763580] env[69992]: DEBUG oslo_concurrency.lockutils [None req-52dae640-9f6f-4b07-b59a-5740e0181f86 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Releasing lock "refresh_cache-e488d413-653b-4730-b7d9-1db8320e9c6b" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1880.764564] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8009ca4-a433-47eb-806e-76b11ac9b61f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.771303] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-52dae640-9f6f-4b07-b59a-5740e0181f86 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Resuming the VM {{(pid=69992) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1880.771524] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9cd524e8-9718-4123-a98f-4353c612d307 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.777444] env[69992]: DEBUG oslo_vmware.api [None req-52dae640-9f6f-4b07-b59a-5740e0181f86 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1880.777444] env[69992]: value = "task-2898353" [ 1880.777444] env[69992]: _type = "Task" [ 1880.777444] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.785009] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1880.787888] env[69992]: DEBUG oslo_vmware.api [None req-52dae640-9f6f-4b07-b59a-5740e0181f86 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898353, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.287298] env[69992]: DEBUG oslo_vmware.api [None req-52dae640-9f6f-4b07-b59a-5740e0181f86 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898353, 'name': PowerOnVM_Task, 'duration_secs': 0.498184} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.287724] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-52dae640-9f6f-4b07-b59a-5740e0181f86 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Resumed the VM {{(pid=69992) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1881.287766] env[69992]: DEBUG nova.compute.manager [None req-52dae640-9f6f-4b07-b59a-5740e0181f86 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1881.288640] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63acd0f8-7c29-4407-9e30-c73ddf55ee18 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.291686] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69992) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1881.291874] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.122s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1882.291922] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1882.291922] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1882.291922] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69992) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1882.688521] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "e488d413-653b-4730-b7d9-1db8320e9c6b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1882.688521] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "e488d413-653b-4730-b7d9-1db8320e9c6b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1882.688692] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "e488d413-653b-4730-b7d9-1db8320e9c6b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1882.688871] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "e488d413-653b-4730-b7d9-1db8320e9c6b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1882.689067] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "e488d413-653b-4730-b7d9-1db8320e9c6b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1882.691141] env[69992]: INFO nova.compute.manager [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Terminating instance [ 1883.195186] env[69992]: DEBUG nova.compute.manager [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1883.195449] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1883.196369] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b3e7ce-16a5-483f-9e15-f12e192245c3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.204332] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1883.204528] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-84854dc7-d47f-444e-a4d4-7ba854389a99 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.212193] env[69992]: DEBUG oslo_vmware.api [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1883.212193] env[69992]: value = "task-2898354" [ 1883.212193] env[69992]: _type = "Task" [ 1883.212193] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.220943] env[69992]: DEBUG oslo_vmware.api [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898354, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.721663] env[69992]: DEBUG oslo_vmware.api [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898354, 'name': PowerOffVM_Task, 'duration_secs': 0.190852} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.721994] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1883.722059] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1883.722276] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2d14d848-cf57-4a52-b9ea-cef799620084 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.789643] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1883.789954] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1883.790173] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Deleting the datastore file [datastore1] e488d413-653b-4730-b7d9-1db8320e9c6b {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1883.790451] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-36aaf4c2-d14b-4221-8718-84c485c80f35 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.797511] env[69992]: DEBUG oslo_vmware.api [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for the task: (returnval){ [ 1883.797511] env[69992]: value = "task-2898356" [ 1883.797511] env[69992]: _type = "Task" [ 1883.797511] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.804891] env[69992]: DEBUG oslo_vmware.api [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898356, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.306820] env[69992]: DEBUG oslo_vmware.api [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Task: {'id': task-2898356, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146589} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.307083] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1884.307277] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1884.307465] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1884.307639] env[69992]: INFO nova.compute.manager [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1884.307871] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1884.308065] env[69992]: DEBUG nova.compute.manager [-] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1884.308160] env[69992]: DEBUG nova.network.neutron [-] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1884.732520] env[69992]: DEBUG nova.compute.manager [req-bfba94ed-9571-4ab1-b9b8-46c1c40fbd53 req-b3e9ec14-1deb-435d-aaf8-919444993af6 service nova] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Received event network-vif-deleted-89c8fa29-faa7-447f-8ad6-5ebcd21e1c77 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1884.732767] env[69992]: INFO nova.compute.manager [req-bfba94ed-9571-4ab1-b9b8-46c1c40fbd53 req-b3e9ec14-1deb-435d-aaf8-919444993af6 service nova] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Neutron deleted interface 89c8fa29-faa7-447f-8ad6-5ebcd21e1c77; detaching it from the instance and deleting it from the info cache [ 1884.732933] env[69992]: DEBUG nova.network.neutron [req-bfba94ed-9571-4ab1-b9b8-46c1c40fbd53 req-b3e9ec14-1deb-435d-aaf8-919444993af6 service nova] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1885.210917] env[69992]: DEBUG nova.network.neutron [-] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1885.235299] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-55a4ad5b-2ddb-4d1a-a6f7-b38af64210c4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.245725] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a0636fc-da2f-46f8-8fc3-e6216b45cd9b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.270142] env[69992]: DEBUG nova.compute.manager [req-bfba94ed-9571-4ab1-b9b8-46c1c40fbd53 req-b3e9ec14-1deb-435d-aaf8-919444993af6 service nova] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Detach interface failed, port_id=89c8fa29-faa7-447f-8ad6-5ebcd21e1c77, reason: Instance e488d413-653b-4730-b7d9-1db8320e9c6b could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1885.609673] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1885.713564] env[69992]: INFO nova.compute.manager [-] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Took 1.41 seconds to deallocate network for instance. 
[ 1886.219403] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1886.219796] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1886.219928] env[69992]: DEBUG nova.objects.instance [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lazy-loading 'resources' on Instance uuid e488d413-653b-4730-b7d9-1db8320e9c6b {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1886.754283] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70a9df8-ae72-476c-8bc0-c4dc525619f9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.763685] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5531dd85-7c85-4be4-9ad6-a8d8a9e5e6cd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.792875] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee976099-5bf9-44e6-9075-18e50da9f3d8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.799931] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72194a84-443b-4fee-80b6-e3955810790f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.812652] env[69992]: DEBUG nova.compute.provider_tree [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1887.315395] env[69992]: DEBUG nova.scheduler.client.report [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1887.820419] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 
tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.601s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1887.841749] env[69992]: INFO nova.scheduler.client.report [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Deleted allocations for instance e488d413-653b-4730-b7d9-1db8320e9c6b [ 1888.349213] env[69992]: DEBUG oslo_concurrency.lockutils [None req-ff121cc0-481b-47d1-a8d3-9c3728ef8308 tempest-ServerActionsTestJSON-1992076937 tempest-ServerActionsTestJSON-1992076937-project-member] Lock "e488d413-653b-4730-b7d9-1db8320e9c6b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.661s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1894.373071] env[69992]: DEBUG oslo_concurrency.lockutils [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1894.373368] env[69992]: DEBUG oslo_concurrency.lockutils [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1894.876179] env[69992]: DEBUG nova.compute.manager [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1895.398695] env[69992]: DEBUG oslo_concurrency.lockutils [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1895.398962] env[69992]: DEBUG oslo_concurrency.lockutils [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1895.400558] env[69992]: INFO nova.compute.claims [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1896.438408] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dfae169-5a6c-4be8-8788-7b8a3fa249ca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.445563] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f49805-f8ae-4102-8f73-fa3fd7f5072d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.474192] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3854e474-b8b0-4c5b-bbd0-51d591d89082 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.480805] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf2b996-5996-445e-ad1f-9ba781ddde8b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.494163] env[69992]: DEBUG nova.compute.provider_tree [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1896.605264] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1896.997579] env[69992]: DEBUG nova.scheduler.client.report [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1897.502714] env[69992]: DEBUG oslo_concurrency.lockutils [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.104s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1897.503272] env[69992]: DEBUG nova.compute.manager [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1898.008790] env[69992]: DEBUG nova.compute.utils [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1898.011841] env[69992]: DEBUG nova.compute.manager [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1898.011841] env[69992]: DEBUG nova.network.neutron [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1898.048609] env[69992]: DEBUG nova.policy [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3aee5449aa2b43f9b852cae59e547f39', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b7e8c96af882413d886198a7b64b474f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 1898.302462] env[69992]: DEBUG nova.network.neutron [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Successfully created port: 2ed16f5a-d5d8-447c-aaa2-c02231e5af66 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1898.514914] env[69992]: DEBUG nova.compute.manager [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Start building block device mappings for instance. 
{{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1899.525769] env[69992]: DEBUG nova.compute.manager [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Start spawning the instance on the hypervisor. {{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1899.551328] env[69992]: DEBUG nova.virt.hardware [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1899.551584] env[69992]: DEBUG nova.virt.hardware [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1899.551745] env[69992]: DEBUG nova.virt.hardware [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1899.551935] env[69992]: DEBUG nova.virt.hardware [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1899.552114] env[69992]: DEBUG nova.virt.hardware [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1899.552271] env[69992]: DEBUG nova.virt.hardware [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1899.552978] env[69992]: DEBUG nova.virt.hardware [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1899.552978] env[69992]: DEBUG 
nova.virt.hardware [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1899.552978] env[69992]: DEBUG nova.virt.hardware [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1899.552978] env[69992]: DEBUG nova.virt.hardware [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1899.553377] env[69992]: DEBUG nova.virt.hardware [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1899.553980] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb07dbf4-6e84-4eff-ac73-8958f5185b7f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.562050] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c38c4a6-bd58-41be-b064-a57b949d88e6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.654264] env[69992]: DEBUG nova.compute.manager [req-3b6da292-d958-40c4-8113-44259f7020c8 req-155a3a44-1782-482b-9553-ea9ed9ac5f72 service nova] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Received event network-vif-plugged-2ed16f5a-d5d8-447c-aaa2-c02231e5af66 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1899.654484] env[69992]: DEBUG oslo_concurrency.lockutils [req-3b6da292-d958-40c4-8113-44259f7020c8 req-155a3a44-1782-482b-9553-ea9ed9ac5f72 service nova] Acquiring lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1899.654681] env[69992]: DEBUG oslo_concurrency.lockutils [req-3b6da292-d958-40c4-8113-44259f7020c8 req-155a3a44-1782-482b-9553-ea9ed9ac5f72 service nova] Lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1899.654891] env[69992]: DEBUG oslo_concurrency.lockutils [req-3b6da292-d958-40c4-8113-44259f7020c8 req-155a3a44-1782-482b-9553-ea9ed9ac5f72 service nova] Lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1899.655012] env[69992]: DEBUG nova.compute.manager [req-3b6da292-d958-40c4-8113-44259f7020c8 req-155a3a44-1782-482b-9553-ea9ed9ac5f72 
service nova] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] No waiting events found dispatching network-vif-plugged-2ed16f5a-d5d8-447c-aaa2-c02231e5af66 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1899.655191] env[69992]: WARNING nova.compute.manager [req-3b6da292-d958-40c4-8113-44259f7020c8 req-155a3a44-1782-482b-9553-ea9ed9ac5f72 service nova] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Received unexpected event network-vif-plugged-2ed16f5a-d5d8-447c-aaa2-c02231e5af66 for instance with vm_state building and task_state spawning. [ 1899.737178] env[69992]: DEBUG nova.network.neutron [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Successfully updated port: 2ed16f5a-d5d8-447c-aaa2-c02231e5af66 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1900.242433] env[69992]: DEBUG oslo_concurrency.lockutils [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "refresh_cache-db23bdaa-b33d-4897-99ce-cdaa41a1758e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1900.242591] env[69992]: DEBUG oslo_concurrency.lockutils [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquired lock "refresh_cache-db23bdaa-b33d-4897-99ce-cdaa41a1758e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1900.242742] env[69992]: DEBUG nova.network.neutron [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1900.774286] env[69992]: DEBUG nova.network.neutron [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1900.892271] env[69992]: DEBUG nova.network.neutron [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Updating instance_info_cache with network_info: [{"id": "2ed16f5a-d5d8-447c-aaa2-c02231e5af66", "address": "fa:16:3e:5d:c3:32", "network": {"id": "66e306e0-b31e-4353-94ff-5022400c5f7e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1250291633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7e8c96af882413d886198a7b64b474f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ed16f5a-d5", "ovs_interfaceid": "2ed16f5a-d5d8-447c-aaa2-c02231e5af66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1901.396331] env[69992]: DEBUG oslo_concurrency.lockutils [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Releasing lock "refresh_cache-db23bdaa-b33d-4897-99ce-cdaa41a1758e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1901.396653] env[69992]: DEBUG nova.compute.manager [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Instance network_info: |[{"id": "2ed16f5a-d5d8-447c-aaa2-c02231e5af66", "address": "fa:16:3e:5d:c3:32", "network": {"id": "66e306e0-b31e-4353-94ff-5022400c5f7e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1250291633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7e8c96af882413d886198a7b64b474f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ed16f5a-d5", "ovs_interfaceid": "2ed16f5a-d5d8-447c-aaa2-c02231e5af66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1901.397094] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:c3:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe20ef0e-0991-44d7-887d-08dddac0b56b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2ed16f5a-d5d8-447c-aaa2-c02231e5af66', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1901.404396] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Creating folder: Project (b7e8c96af882413d886198a7b64b474f). Parent ref: group-v581821. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1901.404657] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a0d62b1c-df53-42b5-ad68-003fdb581673 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.416822] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Created folder: Project (b7e8c96af882413d886198a7b64b474f) in parent group-v581821. [ 1901.416992] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Creating folder: Instances. Parent ref: group-v582164. {{(pid=69992) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1901.417211] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e2aa60a4-e516-4270-a9e0-51c4bab4637d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.427095] env[69992]: INFO nova.virt.vmwareapi.vm_util [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Created folder: Instances in parent group-v582164. [ 1901.427309] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
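Annotation: the network_info entry logged above is plain JSON-like data, and only a handful of its fields carry the values Nova reports (fixed IP, MAC, MTU). The snippet below uses a trimmed copy of the logged entry to show where those fields live; it is purely illustrative and not a Nova API.

# Illustrative only: a trimmed copy of the network_info entry logged above.
vif = {
    "id": "2ed16f5a-d5d8-447c-aaa2-c02231e5af66",
    "address": "fa:16:3e:5d:c3:32",
    "network": {
        "label": "tempest-AttachVolumeTestJSON-1250291633-network",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.12", "type": "fixed"}],
        }],
        "meta": {"mtu": 8950},
    },
    "type": "ovs",
    "devname": "tap2ed16f5a-d5",
}

mac = vif["address"]
mtu = vif["network"]["meta"]["mtu"]
fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"] if ip["type"] == "fixed"]
print(mac, mtu, fixed_ips)   # fa:16:3e:5d:c3:32 8950 ['192.168.128.12']
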
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1901.427479] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1901.427655] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-79f69391-270a-47e4-8911-d594fbb070c5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.445243] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1901.445243] env[69992]: value = "task-2898359" [ 1901.445243] env[69992]: _type = "Task" [ 1901.445243] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.451988] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898359, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.679886] env[69992]: DEBUG nova.compute.manager [req-cc62ace6-b73f-4f94-b0d7-c949a70ec27f req-4ba92efb-53db-4792-bbb9-d4b35c190481 service nova] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Received event network-changed-2ed16f5a-d5d8-447c-aaa2-c02231e5af66 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1901.680127] env[69992]: DEBUG nova.compute.manager [req-cc62ace6-b73f-4f94-b0d7-c949a70ec27f req-4ba92efb-53db-4792-bbb9-d4b35c190481 service nova] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Refreshing instance network info cache due to event network-changed-2ed16f5a-d5d8-447c-aaa2-c02231e5af66. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1901.680324] env[69992]: DEBUG oslo_concurrency.lockutils [req-cc62ace6-b73f-4f94-b0d7-c949a70ec27f req-4ba92efb-53db-4792-bbb9-d4b35c190481 service nova] Acquiring lock "refresh_cache-db23bdaa-b33d-4897-99ce-cdaa41a1758e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1901.680460] env[69992]: DEBUG oslo_concurrency.lockutils [req-cc62ace6-b73f-4f94-b0d7-c949a70ec27f req-4ba92efb-53db-4792-bbb9-d4b35c190481 service nova] Acquired lock "refresh_cache-db23bdaa-b33d-4897-99ce-cdaa41a1758e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1901.680619] env[69992]: DEBUG nova.network.neutron [req-cc62ace6-b73f-4f94-b0d7-c949a70ec27f req-4ba92efb-53db-4792-bbb9-d4b35c190481 service nova] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Refreshing network info cache for port 2ed16f5a-d5d8-447c-aaa2-c02231e5af66 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1901.954842] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898359, 'name': CreateVM_Task, 'duration_secs': 0.301} completed successfully. 
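Annotation: each vCenter operation in this log is asynchronous. Nova submits a task (here CreateVM_Task), then wait_for_task polls it until it reports success or error, which is why the "Waiting for the task" and "progress is N%" pairs keep appearing. The loop below is a rough, self-contained sketch of that behaviour with the poll call passed in as a callable; it is not oslo.vmware's actual implementation.

# Rough sketch of the wait_for_task/_poll_task behaviour seen above.
# `poll` is any callable returning (state, progress, result); interval and
# states are simplified, this is not oslo.vmware's code.
import time

def wait_for_task(poll, interval=0.5):
    while True:
        state, progress, result = poll()
        if state == 'success':
            return result
        if state == 'error':
            raise RuntimeError(result)
        # 'queued' or 'running': report progress and try again
        print('progress is %s%%' % (progress or 0))
        time.sleep(interval)
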
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.955186] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1901.955653] env[69992]: DEBUG oslo_concurrency.lockutils [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1901.955861] env[69992]: DEBUG oslo_concurrency.lockutils [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1901.956211] env[69992]: DEBUG oslo_concurrency.lockutils [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1901.956463] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a500d50-4b33-43ad-9944-375765aac3ae {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.960614] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 1901.960614] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]5284703b-181b-4dfb-1141-ae6df04da10b" [ 1901.960614] env[69992]: _type = "Task" [ 1901.960614] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.967653] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5284703b-181b-4dfb-1141-ae6df04da10b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.354208] env[69992]: DEBUG nova.network.neutron [req-cc62ace6-b73f-4f94-b0d7-c949a70ec27f req-4ba92efb-53db-4792-bbb9-d4b35c190481 service nova] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Updated VIF entry in instance network info cache for port 2ed16f5a-d5d8-447c-aaa2-c02231e5af66. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1902.354570] env[69992]: DEBUG nova.network.neutron [req-cc62ace6-b73f-4f94-b0d7-c949a70ec27f req-4ba92efb-53db-4792-bbb9-d4b35c190481 service nova] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Updating instance_info_cache with network_info: [{"id": "2ed16f5a-d5d8-447c-aaa2-c02231e5af66", "address": "fa:16:3e:5d:c3:32", "network": {"id": "66e306e0-b31e-4353-94ff-5022400c5f7e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1250291633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7e8c96af882413d886198a7b64b474f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ed16f5a-d5", "ovs_interfaceid": "2ed16f5a-d5d8-447c-aaa2-c02231e5af66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1902.470881] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]5284703b-181b-4dfb-1141-ae6df04da10b, 'name': SearchDatastore_Task, 'duration_secs': 0.010841} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.471146] env[69992]: DEBUG oslo_concurrency.lockutils [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1902.471382] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1902.471616] env[69992]: DEBUG oslo_concurrency.lockutils [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1902.471761] env[69992]: DEBUG oslo_concurrency.lockutils [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquired lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1902.471942] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1902.472203] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5539015a-8198-4a47-a1d0-7412bf00e9a9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.481151] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1902.481319] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1902.481970] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a83137f-e471-4599-b2aa-1584167141b5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.486407] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 1902.486407] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]52013331-5821-6b22-b1f4-83a6061e06ec" [ 1902.486407] env[69992]: _type = "Task" [ 1902.486407] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.493917] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52013331-5821-6b22-b1f4-83a6061e06ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.857202] env[69992]: DEBUG oslo_concurrency.lockutils [req-cc62ace6-b73f-4f94-b0d7-c949a70ec27f req-4ba92efb-53db-4792-bbb9-d4b35c190481 service nova] Releasing lock "refresh_cache-db23bdaa-b33d-4897-99ce-cdaa41a1758e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1902.997566] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]52013331-5821-6b22-b1f4-83a6061e06ec, 'name': SearchDatastore_Task, 'duration_secs': 0.008417} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.998423] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f749e92-2155-4743-9483-b23be3df5eef {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.003538] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 1903.003538] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]525f2281-b40c-3213-bc4f-21489570ba8d" [ 1903.003538] env[69992]: _type = "Task" [ 1903.003538] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.010825] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525f2281-b40c-3213-bc4f-21489570ba8d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.514384] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525f2281-b40c-3213-bc4f-21489570ba8d, 'name': SearchDatastore_Task, 'duration_secs': 0.009663} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.514633] env[69992]: DEBUG oslo_concurrency.lockutils [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Releasing lock "[datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1903.514948] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] db23bdaa-b33d-4897-99ce-cdaa41a1758e/db23bdaa-b33d-4897-99ce-cdaa41a1758e.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1903.515223] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea294a4b-9bdc-4eae-b361-6b19c3e6462f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.521824] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 1903.521824] env[69992]: value = "task-2898360" [ 1903.521824] env[69992]: _type = "Task" [ 1903.521824] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.528937] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898360, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.031840] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898360, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.452844} completed successfully. 
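Annotation: the CopyVirtualDisk step above clones the cached image vmdk into a per-instance folder on the same datastore, and both ends of the copy use the "[<datastore>] <folder>/<file>" path convention. The helper below only reproduces that naming convention from the logged paths; it is not Nova's ds_util code.

# Illustrative helpers for the "[datastore1] folder/file.vmdk" paths above;
# same naming convention as the log, not Nova's actual ds_util code.
def cached_image_path(datastore, image_id):
    return "[%s] devstack-image-cache_base/%s/%s.vmdk" % (
        datastore, image_id, image_id)

def instance_root_disk_path(datastore, instance_uuid):
    return "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

src = cached_image_path("datastore1", "eb50549f-9db8-4c15-a738-0e4b1e9e33fb")
dst = instance_root_disk_path("datastore1",
                              "db23bdaa-b33d-4897-99ce-cdaa41a1758e")
# src and dst match the CopyVirtualDisk source and destination in the log.
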
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.032236] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore1] db23bdaa-b33d-4897-99ce-cdaa41a1758e/db23bdaa-b33d-4897-99ce-cdaa41a1758e.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1904.032326] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1904.032555] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dc308bdc-f150-4d3f-a32a-4252f75807ef {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.038664] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 1904.038664] env[69992]: value = "task-2898361" [ 1904.038664] env[69992]: _type = "Task" [ 1904.038664] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.045637] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898361, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.548346] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898361, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070579} completed successfully. 
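Annotation: "Extending root virtual disk to 1048576" sizes the copied root disk up to the flavor's root disk. The value is the new capacity in KiB (1048576 KiB = 1 GiB), which is consistent with the DISK_GB: 1 allocation reported later in this log. The one-liner below shows the conversion, assuming a 1 GB flavor root disk.

# Assumption: flavor root_gb = 1, matching the DISK_GB: 1 allocation in the log.
root_gb = 1
new_capacity_kib = root_gb * 1024 * 1024       # 1048576, the value logged above
new_capacity_gib = new_capacity_kib / (1024 * 1024)   # back to 1.0 GiB
print(new_capacity_kib, new_capacity_gib)
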
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.548608] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1904.549452] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-066311c0-bc44-4242-abb7-2dbdb895497f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.571664] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Reconfiguring VM instance instance-0000007f to attach disk [datastore1] db23bdaa-b33d-4897-99ce-cdaa41a1758e/db23bdaa-b33d-4897-99ce-cdaa41a1758e.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1904.571892] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e3f2b7e-60d8-49c6-8a0f-6be3bfae877e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.592287] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 1904.592287] env[69992]: value = "task-2898362" [ 1904.592287] env[69992]: _type = "Task" [ 1904.592287] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.603055] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898362, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.102408] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898362, 'name': ReconfigVM_Task, 'duration_secs': 0.272877} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.102802] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Reconfigured VM instance instance-0000007f to attach disk [datastore1] db23bdaa-b33d-4897-99ce-cdaa41a1758e/db23bdaa-b33d-4897-99ce-cdaa41a1758e.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1905.103310] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f434dbe-97ab-4706-89ec-511920091d68 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.109459] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 1905.109459] env[69992]: value = "task-2898363" [ 1905.109459] env[69992]: _type = "Task" [ 1905.109459] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.116866] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898363, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.619210] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898363, 'name': Rename_Task, 'duration_secs': 0.141676} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.619465] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1905.619699] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b2010013-cd9d-4f4d-ba6c-0ae302ebf1a1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.625639] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 1905.625639] env[69992]: value = "task-2898364" [ 1905.625639] env[69992]: _type = "Task" [ 1905.625639] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.632558] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898364, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.135668] env[69992]: DEBUG oslo_vmware.api [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898364, 'name': PowerOnVM_Task, 'duration_secs': 0.430044} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.136094] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1906.136237] env[69992]: INFO nova.compute.manager [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Took 6.61 seconds to spawn the instance on the hypervisor. [ 1906.136359] env[69992]: DEBUG nova.compute.manager [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1906.137107] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-901f8365-8bea-4caa-8b4c-c3ffc18d2138 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.653154] env[69992]: INFO nova.compute.manager [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Took 11.27 seconds to build instance. [ 1907.155318] env[69992]: DEBUG oslo_concurrency.lockutils [None req-214b7e9c-bfaf-4001-aa10-ad6babf7460d tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.782s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1907.279640] env[69992]: DEBUG nova.compute.manager [req-b5a4b860-9761-4134-9d95-67d09a13cb0d req-5b50bf9b-3282-42a3-9648-e804690379d9 service nova] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Received event network-changed-2ed16f5a-d5d8-447c-aaa2-c02231e5af66 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1907.279768] env[69992]: DEBUG nova.compute.manager [req-b5a4b860-9761-4134-9d95-67d09a13cb0d req-5b50bf9b-3282-42a3-9648-e804690379d9 service nova] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Refreshing instance network info cache due to event network-changed-2ed16f5a-d5d8-447c-aaa2-c02231e5af66. 
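Annotation: every record above carries a bracketed context that begins with a request id (req-...); the Neutron-triggered event records carry a second id alongside it. Filtering on a single request id is the quickest way to follow one operation, such as this instance build, end to end. A small grep-style sketch follows; the log filename is a placeholder.

# Pull every record for one request id out of a compute log.
# 'nova-compute.log' is a placeholder filename; the req- id below is the one
# that drove the instance build in the records above.
import sys

REQ_ID = 'req-214b7e9c-bfaf-4001-aa10-ad6babf7460d'

with open(sys.argv[1] if len(sys.argv) > 1 else 'nova-compute.log') as fh:
    for line in fh:
        if REQ_ID in line:
            print(line.rstrip())
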
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1907.281049] env[69992]: DEBUG oslo_concurrency.lockutils [req-b5a4b860-9761-4134-9d95-67d09a13cb0d req-5b50bf9b-3282-42a3-9648-e804690379d9 service nova] Acquiring lock "refresh_cache-db23bdaa-b33d-4897-99ce-cdaa41a1758e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1907.281049] env[69992]: DEBUG oslo_concurrency.lockutils [req-b5a4b860-9761-4134-9d95-67d09a13cb0d req-5b50bf9b-3282-42a3-9648-e804690379d9 service nova] Acquired lock "refresh_cache-db23bdaa-b33d-4897-99ce-cdaa41a1758e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1907.281049] env[69992]: DEBUG nova.network.neutron [req-b5a4b860-9761-4134-9d95-67d09a13cb0d req-5b50bf9b-3282-42a3-9648-e804690379d9 service nova] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Refreshing network info cache for port 2ed16f5a-d5d8-447c-aaa2-c02231e5af66 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1907.973479] env[69992]: DEBUG nova.network.neutron [req-b5a4b860-9761-4134-9d95-67d09a13cb0d req-5b50bf9b-3282-42a3-9648-e804690379d9 service nova] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Updated VIF entry in instance network info cache for port 2ed16f5a-d5d8-447c-aaa2-c02231e5af66. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1907.973856] env[69992]: DEBUG nova.network.neutron [req-b5a4b860-9761-4134-9d95-67d09a13cb0d req-5b50bf9b-3282-42a3-9648-e804690379d9 service nova] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Updating instance_info_cache with network_info: [{"id": "2ed16f5a-d5d8-447c-aaa2-c02231e5af66", "address": "fa:16:3e:5d:c3:32", "network": {"id": "66e306e0-b31e-4353-94ff-5022400c5f7e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1250291633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7e8c96af882413d886198a7b64b474f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ed16f5a-d5", "ovs_interfaceid": "2ed16f5a-d5d8-447c-aaa2-c02231e5af66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1908.477162] env[69992]: DEBUG oslo_concurrency.lockutils [req-b5a4b860-9761-4134-9d95-67d09a13cb0d req-5b50bf9b-3282-42a3-9648-e804690379d9 service nova] Releasing lock "refresh_cache-db23bdaa-b33d-4897-99ce-cdaa41a1758e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1937.609428] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic 
task ComputeManager._poll_rescued_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1938.609260] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1938.609499] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1939.112885] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1939.113167] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1939.113343] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1939.113497] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69992) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1939.114419] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48360a13-9a32-4b21-b7ab-6085a939da5c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.122358] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5553cf5-6c41-4336-9ee3-296d1531bb7f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.135599] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352d4489-907e-4bfd-a5cb-b2bf857b3d63 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.141742] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45dc4025-d4cf-4862-bd53-e183ada45a32 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.170404] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181131MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=69992) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1939.170546] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1939.170754] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1940.196333] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance db23bdaa-b33d-4897-99ce-cdaa41a1758e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1940.196668] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1940.196739] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1940.212561] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Refreshing inventories for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1940.225021] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Updating ProviderTree inventory for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1940.226040] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Updating inventory in ProviderTree for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1940.235812] env[69992]: DEBUG nova.scheduler.client.report [None 
req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Refreshing aggregate associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, aggregates: None {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1940.252348] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Refreshing trait associations for resource provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=69992) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1940.275305] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30306f7-9fde-4f43-a861-503d60b7da79 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.282718] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc32da2b-f7ce-4aed-a868-ec077415a64a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.312319] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-417b3ace-7047-41dd-826f-11d16d09adaa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.319155] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c0578a2-16af-4e22-85b1-553430f69627 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.331969] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1940.835617] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1941.340236] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69992) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1941.340592] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.170s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1942.336527] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task 
ComputeManager._check_instance_build_time {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1942.336742] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1942.336932] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1943.609656] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1943.610038] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69992) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1944.374877] env[69992]: DEBUG oslo_concurrency.lockutils [None req-63b8c980-77d3-48b4-8aec-f2333974edf2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1944.375124] env[69992]: DEBUG oslo_concurrency.lockutils [None req-63b8c980-77d3-48b4-8aec-f2333974edf2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1944.878614] env[69992]: DEBUG nova.compute.utils [None req-63b8c980-77d3-48b4-8aec-f2333974edf2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1945.382351] env[69992]: DEBUG oslo_concurrency.lockutils [None req-63b8c980-77d3-48b4-8aec-f2333974edf2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1946.438828] env[69992]: DEBUG oslo_concurrency.lockutils [None req-63b8c980-77d3-48b4-8aec-f2333974edf2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1946.439201] env[69992]: DEBUG oslo_concurrency.lockutils [None req-63b8c980-77d3-48b4-8aec-f2333974edf2 tempest-AttachVolumeTestJSON-502636254 
tempest-AttachVolumeTestJSON-502636254-project-member] Lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1946.439355] env[69992]: INFO nova.compute.manager [None req-63b8c980-77d3-48b4-8aec-f2333974edf2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Attaching volume 6631b867-8b09-4854-9b7f-a03cb67de5e8 to /dev/sdb [ 1946.469067] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e279f8-51d2-428a-883a-870e9b246ef4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.477469] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-571817e9-f3d4-4e22-84f3-c5d73b0dcca6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.490819] env[69992]: DEBUG nova.virt.block_device [None req-63b8c980-77d3-48b4-8aec-f2333974edf2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Updating existing volume attachment record: 593afcda-e97d-4fef-8102-436bf5d3dd56 {{(pid=69992) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1947.610522] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1951.034097] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-63b8c980-77d3-48b4-8aec-f2333974edf2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Volume attach. 
Driver type: vmdk {{(pid=69992) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1951.034359] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-63b8c980-77d3-48b4-8aec-f2333974edf2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582168', 'volume_id': '6631b867-8b09-4854-9b7f-a03cb67de5e8', 'name': 'volume-6631b867-8b09-4854-9b7f-a03cb67de5e8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'db23bdaa-b33d-4897-99ce-cdaa41a1758e', 'attached_at': '', 'detached_at': '', 'volume_id': '6631b867-8b09-4854-9b7f-a03cb67de5e8', 'serial': '6631b867-8b09-4854-9b7f-a03cb67de5e8'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1951.036148] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea6f6ad-8171-4172-9fa5-9ad0e431b0f1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.051871] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f50f71fb-1e81-4ae9-9683-02d945ef2b2b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.075303] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-63b8c980-77d3-48b4-8aec-f2333974edf2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Reconfiguring VM instance instance-0000007f to attach disk [datastore2] volume-6631b867-8b09-4854-9b7f-a03cb67de5e8/volume-6631b867-8b09-4854-9b7f-a03cb67de5e8.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1951.075522] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dbcd6329-784b-4776-803b-7d58390785de {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.093782] env[69992]: DEBUG oslo_vmware.api [None req-63b8c980-77d3-48b4-8aec-f2333974edf2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 1951.093782] env[69992]: value = "task-2898369" [ 1951.093782] env[69992]: _type = "Task" [ 1951.093782] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.103031] env[69992]: DEBUG oslo_vmware.api [None req-63b8c980-77d3-48b4-8aec-f2333974edf2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898369, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.603462] env[69992]: DEBUG oslo_vmware.api [None req-63b8c980-77d3-48b4-8aec-f2333974edf2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898369, 'name': ReconfigVM_Task, 'duration_secs': 0.356066} completed successfully. 
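Annotation: the connection_info dumped above is what the block-device layer hands the VMware driver for a vmdk-backed volume; only a few of its fields are needed to locate and attach the backing disk. The snippet below extracts them from a trimmed copy of the logged dict; it is illustrative, not Nova's _attach_volume_vmdk implementation.

# Trimmed copy of the connection_info logged above; illustrative extraction,
# not Nova's _attach_volume_vmdk code.
connection_info = {
    'driver_volume_type': 'vmdk',
    'data': {
        'volume': 'vm-582168',   # managed object id of the volume's backing VM
        'volume_id': '6631b867-8b09-4854-9b7f-a03cb67de5e8',
        'name': 'volume-6631b867-8b09-4854-9b7f-a03cb67de5e8',
        'access_mode': 'rw',
        'encrypted': False,
    },
}

assert connection_info['driver_volume_type'] == 'vmdk'
data = connection_info['data']
print(data['volume'], data['volume_id'], data['access_mode'])
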
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.603735] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-63b8c980-77d3-48b4-8aec-f2333974edf2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Reconfigured VM instance instance-0000007f to attach disk [datastore2] volume-6631b867-8b09-4854-9b7f-a03cb67de5e8/volume-6631b867-8b09-4854-9b7f-a03cb67de5e8.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1951.608430] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26054721-8d14-417e-ab13-a4ecac865f9e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.622348] env[69992]: DEBUG oslo_vmware.api [None req-63b8c980-77d3-48b4-8aec-f2333974edf2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 1951.622348] env[69992]: value = "task-2898370" [ 1951.622348] env[69992]: _type = "Task" [ 1951.622348] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.630854] env[69992]: DEBUG oslo_vmware.api [None req-63b8c980-77d3-48b4-8aec-f2333974edf2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898370, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.131861] env[69992]: DEBUG oslo_vmware.api [None req-63b8c980-77d3-48b4-8aec-f2333974edf2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898370, 'name': ReconfigVM_Task, 'duration_secs': 0.132177} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.132180] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-63b8c980-77d3-48b4-8aec-f2333974edf2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582168', 'volume_id': '6631b867-8b09-4854-9b7f-a03cb67de5e8', 'name': 'volume-6631b867-8b09-4854-9b7f-a03cb67de5e8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'db23bdaa-b33d-4897-99ce-cdaa41a1758e', 'attached_at': '', 'detached_at': '', 'volume_id': '6631b867-8b09-4854-9b7f-a03cb67de5e8', 'serial': '6631b867-8b09-4854-9b7f-a03cb67de5e8'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1953.167027] env[69992]: DEBUG nova.objects.instance [None req-63b8c980-77d3-48b4-8aec-f2333974edf2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lazy-loading 'flavor' on Instance uuid db23bdaa-b33d-4897-99ce-cdaa41a1758e {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1953.673258] env[69992]: DEBUG oslo_concurrency.lockutils [None req-63b8c980-77d3-48b4-8aec-f2333974edf2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.234s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1953.757238] env[69992]: DEBUG oslo_concurrency.lockutils [None req-84b78422-7f44-4640-a964-ed6699b0e024 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1953.757486] env[69992]: DEBUG oslo_concurrency.lockutils [None req-84b78422-7f44-4640-a964-ed6699b0e024 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1953.757688] env[69992]: DEBUG nova.compute.manager [None req-84b78422-7f44-4640-a964-ed6699b0e024 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1953.758591] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c88091c5-7e3d-4252-8c0a-efbee3425cf7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.765633] env[69992]: DEBUG nova.compute.manager [None req-84b78422-7f44-4640-a964-ed6699b0e024 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Stopping instance; current vm_state: active, 
current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69992) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1953.766182] env[69992]: DEBUG nova.objects.instance [None req-84b78422-7f44-4640-a964-ed6699b0e024 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lazy-loading 'flavor' on Instance uuid db23bdaa-b33d-4897-99ce-cdaa41a1758e {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1954.772419] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-84b78422-7f44-4640-a964-ed6699b0e024 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1954.772756] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b2d99716-a919-4314-b86f-bf40956cecb7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.779730] env[69992]: DEBUG oslo_vmware.api [None req-84b78422-7f44-4640-a964-ed6699b0e024 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 1954.779730] env[69992]: value = "task-2898371" [ 1954.779730] env[69992]: _type = "Task" [ 1954.779730] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.787611] env[69992]: DEBUG oslo_vmware.api [None req-84b78422-7f44-4640-a964-ed6699b0e024 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898371, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.289726] env[69992]: DEBUG oslo_vmware.api [None req-84b78422-7f44-4640-a964-ed6699b0e024 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898371, 'name': PowerOffVM_Task, 'duration_secs': 0.206764} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.290072] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-84b78422-7f44-4640-a964-ed6699b0e024 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1955.290222] env[69992]: DEBUG nova.compute.manager [None req-84b78422-7f44-4640-a964-ed6699b0e024 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1955.290957] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe68af82-b9d0-4c6c-9b66-f17fa09ab302 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.801392] env[69992]: DEBUG oslo_concurrency.lockutils [None req-84b78422-7f44-4640-a964-ed6699b0e024 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.044s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1956.128624] env[69992]: DEBUG nova.objects.instance [None req-b733bdea-2c45-45cb-94fc-b44fa9edbc37 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lazy-loading 'flavor' on Instance uuid db23bdaa-b33d-4897-99ce-cdaa41a1758e {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1956.633843] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b733bdea-2c45-45cb-94fc-b44fa9edbc37 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "refresh_cache-db23bdaa-b33d-4897-99ce-cdaa41a1758e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1956.634044] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b733bdea-2c45-45cb-94fc-b44fa9edbc37 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquired lock "refresh_cache-db23bdaa-b33d-4897-99ce-cdaa41a1758e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1956.634259] env[69992]: DEBUG nova.network.neutron [None req-b733bdea-2c45-45cb-94fc-b44fa9edbc37 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1956.634452] env[69992]: DEBUG nova.objects.instance [None req-b733bdea-2c45-45cb-94fc-b44fa9edbc37 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lazy-loading 'info_cache' on Instance uuid db23bdaa-b33d-4897-99ce-cdaa41a1758e {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1957.137371] env[69992]: DEBUG nova.objects.base [None req-b733bdea-2c45-45cb-94fc-b44fa9edbc37 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Object Instance lazy-loaded 
attributes: flavor,info_cache {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1957.830123] env[69992]: DEBUG nova.network.neutron [None req-b733bdea-2c45-45cb-94fc-b44fa9edbc37 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Updating instance_info_cache with network_info: [{"id": "2ed16f5a-d5d8-447c-aaa2-c02231e5af66", "address": "fa:16:3e:5d:c3:32", "network": {"id": "66e306e0-b31e-4353-94ff-5022400c5f7e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1250291633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7e8c96af882413d886198a7b64b474f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ed16f5a-d5", "ovs_interfaceid": "2ed16f5a-d5d8-447c-aaa2-c02231e5af66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1958.333163] env[69992]: DEBUG oslo_concurrency.lockutils [None req-b733bdea-2c45-45cb-94fc-b44fa9edbc37 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Releasing lock "refresh_cache-db23bdaa-b33d-4897-99ce-cdaa41a1758e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1959.340331] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b733bdea-2c45-45cb-94fc-b44fa9edbc37 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1959.340683] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ff127e0a-dfb8-41c4-a6f4-3fb83b80ac2a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.349159] env[69992]: DEBUG oslo_vmware.api [None req-b733bdea-2c45-45cb-94fc-b44fa9edbc37 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 1959.349159] env[69992]: value = "task-2898372" [ 1959.349159] env[69992]: _type = "Task" [ 1959.349159] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.357419] env[69992]: DEBUG oslo_vmware.api [None req-b733bdea-2c45-45cb-94fc-b44fa9edbc37 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898372, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.858841] env[69992]: DEBUG oslo_vmware.api [None req-b733bdea-2c45-45cb-94fc-b44fa9edbc37 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898372, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.359028] env[69992]: DEBUG oslo_vmware.api [None req-b733bdea-2c45-45cb-94fc-b44fa9edbc37 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898372, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.859721] env[69992]: DEBUG oslo_vmware.api [None req-b733bdea-2c45-45cb-94fc-b44fa9edbc37 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898372, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.361027] env[69992]: DEBUG oslo_vmware.api [None req-b733bdea-2c45-45cb-94fc-b44fa9edbc37 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898372, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.861211] env[69992]: DEBUG oslo_vmware.api [None req-b733bdea-2c45-45cb-94fc-b44fa9edbc37 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898372, 'name': PowerOnVM_Task, 'duration_secs': 2.454495} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.861478] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-b733bdea-2c45-45cb-94fc-b44fa9edbc37 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1961.861682] env[69992]: DEBUG nova.compute.manager [None req-b733bdea-2c45-45cb-94fc-b44fa9edbc37 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1961.862436] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c002be9-5562-42b8-8636-8d6ea2e3fde1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.609905] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1998.262464] env[69992]: DEBUG oslo_concurrency.lockutils [None req-90fc5794-02f8-49c6-af31-625e992f7612 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1998.262725] env[69992]: DEBUG oslo_concurrency.lockutils [None req-90fc5794-02f8-49c6-af31-625e992f7612 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1998.609423] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1998.765717] env[69992]: INFO nova.compute.manager [None req-90fc5794-02f8-49c6-af31-625e992f7612 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Detaching volume 6631b867-8b09-4854-9b7f-a03cb67de5e8 [ 1998.797294] env[69992]: INFO nova.virt.block_device [None req-90fc5794-02f8-49c6-af31-625e992f7612 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Attempting to driver detach volume 6631b867-8b09-4854-9b7f-a03cb67de5e8 from mountpoint /dev/sdb [ 1998.797520] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-90fc5794-02f8-49c6-af31-625e992f7612 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Volume detach. 
Driver type: vmdk {{(pid=69992) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1998.797700] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-90fc5794-02f8-49c6-af31-625e992f7612 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582168', 'volume_id': '6631b867-8b09-4854-9b7f-a03cb67de5e8', 'name': 'volume-6631b867-8b09-4854-9b7f-a03cb67de5e8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'db23bdaa-b33d-4897-99ce-cdaa41a1758e', 'attached_at': '', 'detached_at': '', 'volume_id': '6631b867-8b09-4854-9b7f-a03cb67de5e8', 'serial': '6631b867-8b09-4854-9b7f-a03cb67de5e8'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1998.798615] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ac3612-c1f6-42e9-b36c-846b54fcb80d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.819649] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242d95a7-cad6-42a1-bd24-23a0aa87c076 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.825937] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b8ee3d-669b-4857-86cc-0e1cc6b219c5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.846462] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17b485b-3450-41da-8048-8cab18f9f647 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.860284] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-90fc5794-02f8-49c6-af31-625e992f7612 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] The volume has not been displaced from its original location: [datastore2] volume-6631b867-8b09-4854-9b7f-a03cb67de5e8/volume-6631b867-8b09-4854-9b7f-a03cb67de5e8.vmdk. No consolidation needed. 
{{(pid=69992) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1998.865419] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-90fc5794-02f8-49c6-af31-625e992f7612 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Reconfiguring VM instance instance-0000007f to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1998.865658] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb500bb3-861e-46e3-a3bc-412e8c785d94 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.882618] env[69992]: DEBUG oslo_vmware.api [None req-90fc5794-02f8-49c6-af31-625e992f7612 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 1998.882618] env[69992]: value = "task-2898373" [ 1998.882618] env[69992]: _type = "Task" [ 1998.882618] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.889681] env[69992]: DEBUG oslo_vmware.api [None req-90fc5794-02f8-49c6-af31-625e992f7612 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898373, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.113156] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1999.113384] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1999.113557] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1999.113712] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69992) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1999.114605] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e271ae5-1538-4da1-9713-b62bb53c4185 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.121958] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019dfafe-ca0d-43f2-8454-e75946580915 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.135348] env[69992]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45837ddc-aa55-4599-adde-7a0f95f367fd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.141094] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7497898-6ac9-4954-bfe9-40de866c3e74 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.168744] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180920MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=69992) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1999.168879] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1999.169096] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1999.392627] env[69992]: DEBUG oslo_vmware.api [None req-90fc5794-02f8-49c6-af31-625e992f7612 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898373, 'name': ReconfigVM_Task, 'duration_secs': 0.226976} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.392833] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-90fc5794-02f8-49c6-af31-625e992f7612 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Reconfigured VM instance instance-0000007f to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1999.397381] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ef24861-26f6-4fe2-b35c-76d405f283aa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.412628] env[69992]: DEBUG oslo_vmware.api [None req-90fc5794-02f8-49c6-af31-625e992f7612 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 1999.412628] env[69992]: value = "task-2898374" [ 1999.412628] env[69992]: _type = "Task" [ 1999.412628] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.420739] env[69992]: DEBUG oslo_vmware.api [None req-90fc5794-02f8-49c6-af31-625e992f7612 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898374, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.600474] env[69992]: DEBUG oslo_vmware.api [None req-90fc5794-02f8-49c6-af31-625e992f7612 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898374, 'name': ReconfigVM_Task, 'duration_secs': 0.128797} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.600768] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-90fc5794-02f8-49c6-af31-625e992f7612 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582168', 'volume_id': '6631b867-8b09-4854-9b7f-a03cb67de5e8', 'name': 'volume-6631b867-8b09-4854-9b7f-a03cb67de5e8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'db23bdaa-b33d-4897-99ce-cdaa41a1758e', 'attached_at': '', 'detached_at': '', 'volume_id': '6631b867-8b09-4854-9b7f-a03cb67de5e8', 'serial': '6631b867-8b09-4854-9b7f-a03cb67de5e8'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2000.677833] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance db23bdaa-b33d-4897-99ce-cdaa41a1758e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2000.678054] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2000.678199] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2000.704583] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ad855e-6cb0-424e-83a7-068a69f4f3ae {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.711761] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f60d933e-0506-4850-aa6c-0878f957c83b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.740881] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa3cbf6-6a87-4fb3-8a4d-7e26eb466d66 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.747583] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cecf0d38-1ba9-4d22-b623-e50fc225353a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.760095] env[69992]: 
DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2001.140758] env[69992]: DEBUG nova.objects.instance [None req-90fc5794-02f8-49c6-af31-625e992f7612 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lazy-loading 'flavor' on Instance uuid db23bdaa-b33d-4897-99ce-cdaa41a1758e {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2001.262825] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2001.264204] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69992) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2001.264389] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.095s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2002.149572] env[69992]: DEBUG oslo_concurrency.lockutils [None req-90fc5794-02f8-49c6-af31-625e992f7612 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.887s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2002.264362] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2002.264574] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2002.264722] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2002.469972] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4082684-5651-4832-a5cd-bb4a1b599312 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" by 
"nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2002.470204] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4082684-5651-4832-a5cd-bb4a1b599312 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2002.470373] env[69992]: DEBUG nova.compute.manager [None req-f4082684-5651-4832-a5cd-bb4a1b599312 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2002.471264] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba91847d-367d-4df9-a814-cfda40edec76 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.478166] env[69992]: DEBUG nova.compute.manager [None req-f4082684-5651-4832-a5cd-bb4a1b599312 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69992) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2002.478719] env[69992]: DEBUG nova.objects.instance [None req-f4082684-5651-4832-a5cd-bb4a1b599312 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lazy-loading 'flavor' on Instance uuid db23bdaa-b33d-4897-99ce-cdaa41a1758e {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2002.609023] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2003.485117] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4082684-5651-4832-a5cd-bb4a1b599312 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2003.485467] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0bb15e2b-a7c2-4585-a43a-794ebe66e385 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.492248] env[69992]: DEBUG oslo_vmware.api [None req-f4082684-5651-4832-a5cd-bb4a1b599312 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 2003.492248] env[69992]: value = "task-2898375" [ 2003.492248] env[69992]: _type = "Task" [ 2003.492248] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2003.499709] env[69992]: DEBUG oslo_vmware.api [None req-f4082684-5651-4832-a5cd-bb4a1b599312 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898375, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2004.001885] env[69992]: DEBUG oslo_vmware.api [None req-f4082684-5651-4832-a5cd-bb4a1b599312 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898375, 'name': PowerOffVM_Task, 'duration_secs': 0.1756} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2004.002148] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4082684-5651-4832-a5cd-bb4a1b599312 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2004.002344] env[69992]: DEBUG nova.compute.manager [None req-f4082684-5651-4832-a5cd-bb4a1b599312 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2004.003093] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0a08f5-9ea2-498b-872d-eee1b1c234ba {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.514015] env[69992]: DEBUG oslo_concurrency.lockutils [None req-f4082684-5651-4832-a5cd-bb4a1b599312 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.044s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2004.609737] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2004.609912] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69992) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 2004.840287] env[69992]: DEBUG nova.objects.instance [None req-036c0d1a-7189-475a-b1a5-f0cfed87146c tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lazy-loading 'flavor' on Instance uuid db23bdaa-b33d-4897-99ce-cdaa41a1758e {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2005.345231] env[69992]: DEBUG oslo_concurrency.lockutils [None req-036c0d1a-7189-475a-b1a5-f0cfed87146c tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "refresh_cache-db23bdaa-b33d-4897-99ce-cdaa41a1758e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2005.345408] env[69992]: DEBUG oslo_concurrency.lockutils [None req-036c0d1a-7189-475a-b1a5-f0cfed87146c tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquired lock "refresh_cache-db23bdaa-b33d-4897-99ce-cdaa41a1758e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2005.345575] env[69992]: DEBUG nova.network.neutron [None req-036c0d1a-7189-475a-b1a5-f0cfed87146c tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2005.345744] env[69992]: DEBUG nova.objects.instance [None req-036c0d1a-7189-475a-b1a5-f0cfed87146c tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lazy-loading 'info_cache' on Instance uuid db23bdaa-b33d-4897-99ce-cdaa41a1758e {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2005.848912] env[69992]: DEBUG nova.objects.base [None req-036c0d1a-7189-475a-b1a5-f0cfed87146c tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=69992) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2006.538253] env[69992]: DEBUG nova.network.neutron [None req-036c0d1a-7189-475a-b1a5-f0cfed87146c tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Updating instance_info_cache with network_info: [{"id": "2ed16f5a-d5d8-447c-aaa2-c02231e5af66", "address": "fa:16:3e:5d:c3:32", "network": {"id": "66e306e0-b31e-4353-94ff-5022400c5f7e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1250291633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7e8c96af882413d886198a7b64b474f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ed16f5a-d5", 
"ovs_interfaceid": "2ed16f5a-d5d8-447c-aaa2-c02231e5af66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2006.609498] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2006.609684] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Cleaning up deleted instances {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 2007.041435] env[69992]: DEBUG oslo_concurrency.lockutils [None req-036c0d1a-7189-475a-b1a5-f0cfed87146c tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Releasing lock "refresh_cache-db23bdaa-b33d-4897-99ce-cdaa41a1758e" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2007.113208] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] There are 3 instances to clean {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 2007.113451] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: e488d413-653b-4730-b7d9-1db8320e9c6b] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 2007.617638] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 2359591e-4149-4594-bcd8-55cb74d1da24] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 2008.047213] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-036c0d1a-7189-475a-b1a5-f0cfed87146c tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2008.047592] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2dc021b-890f-45b1-ad2f-ba987e557c09 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.055117] env[69992]: DEBUG oslo_vmware.api [None req-036c0d1a-7189-475a-b1a5-f0cfed87146c tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 2008.055117] env[69992]: value = "task-2898376" [ 2008.055117] env[69992]: _type = "Task" [ 2008.055117] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.062757] env[69992]: DEBUG oslo_vmware.api [None req-036c0d1a-7189-475a-b1a5-f0cfed87146c tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898376, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.119488] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] [instance: 90facf1a-ae81-4259-bf75-94779267699c] Instance has had 0 of 5 cleanup attempts {{(pid=69992) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 2008.564379] env[69992]: DEBUG oslo_vmware.api [None req-036c0d1a-7189-475a-b1a5-f0cfed87146c tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898376, 'name': PowerOnVM_Task, 'duration_secs': 0.394756} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.564638] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-036c0d1a-7189-475a-b1a5-f0cfed87146c tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2008.564838] env[69992]: DEBUG nova.compute.manager [None req-036c0d1a-7189-475a-b1a5-f0cfed87146c tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2008.565589] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d48cae2-c316-45ae-ae86-0bb410f8aa06 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.623397] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2012.609787] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2017.113523] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2017.113913] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Cleaning up deleted instances with incomplete migration {{(pid=69992) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 2020.108236] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2035.287011] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._sync_power_states {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2035.790238] env[69992]: DEBUG 
nova.virt.vmwareapi.vmops [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Getting list of instances from cluster (obj){ [ 2035.790238] env[69992]: value = "domain-c8" [ 2035.790238] env[69992]: _type = "ClusterComputeResource" [ 2035.790238] env[69992]: } {{(pid=69992) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 2035.791456] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06aa986e-9030-4ea1-8ae1-9603f153f9d7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.801946] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Got total of 1 instances {{(pid=69992) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 2035.802098] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Triggering sync for uuid db23bdaa-b33d-4897-99ce-cdaa41a1758e {{(pid=69992) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 2035.802425] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2035.802642] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2035.803529] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b2e7df-5e18-4de5-bb59-89f960cfae1d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.312465] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.510s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2044.947503] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2044.947857] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2044.947981] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 
tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2044.948176] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2044.948348] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2044.951872] env[69992]: INFO nova.compute.manager [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Terminating instance [ 2045.455622] env[69992]: DEBUG nova.compute.manager [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Start destroying the instance on the hypervisor. {{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2045.455877] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2045.456875] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992bce5e-bffe-4298-a482-541b0aab6818 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.464588] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2045.464800] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-25b47c5a-24dc-4110-9420-7aec5671ce4f {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.471168] env[69992]: DEBUG oslo_vmware.api [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 2045.471168] env[69992]: value = "task-2898377" [ 2045.471168] env[69992]: _type = "Task" [ 2045.471168] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.479285] env[69992]: DEBUG oslo_vmware.api [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898377, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.980767] env[69992]: DEBUG oslo_vmware.api [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898377, 'name': PowerOffVM_Task, 'duration_secs': 0.199253} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2045.981082] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2045.981207] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2045.981447] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-afd0c34e-972b-4187-bd42-5941a5ddac9e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.055451] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2046.055671] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Deleting contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2046.055857] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Deleting the datastore file [datastore1] db23bdaa-b33d-4897-99ce-cdaa41a1758e {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2046.056170] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ec960e2-1feb-4ed4-b83f-5da065c60ebd {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.063333] env[69992]: DEBUG oslo_vmware.api [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 2046.063333] env[69992]: value = "task-2898379" [ 2046.063333] env[69992]: _type = "Task" [ 2046.063333] env[69992]: } to 
complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.070410] env[69992]: DEBUG oslo_vmware.api [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898379, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.572275] env[69992]: DEBUG oslo_vmware.api [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898379, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149202} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2046.572556] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2046.572681] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Deleted contents of the VM from datastore datastore1 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2046.572852] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2046.573030] env[69992]: INFO nova.compute.manager [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2046.573323] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 2046.573511] env[69992]: DEBUG nova.compute.manager [-] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2046.573602] env[69992]: DEBUG nova.network.neutron [-] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2047.020783] env[69992]: DEBUG nova.compute.manager [req-011ae485-cbda-4a9b-aea6-e8499728fcf5 req-a02bba9a-e588-47b0-ba70-a6f477d3871d service nova] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Received event network-vif-deleted-2ed16f5a-d5d8-447c-aaa2-c02231e5af66 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 2047.021043] env[69992]: INFO nova.compute.manager [req-011ae485-cbda-4a9b-aea6-e8499728fcf5 req-a02bba9a-e588-47b0-ba70-a6f477d3871d service nova] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Neutron deleted interface 2ed16f5a-d5d8-447c-aaa2-c02231e5af66; detaching it from the instance and deleting it from the info cache [ 2047.021257] env[69992]: DEBUG nova.network.neutron [req-011ae485-cbda-4a9b-aea6-e8499728fcf5 req-a02bba9a-e588-47b0-ba70-a6f477d3871d service nova] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2047.505741] env[69992]: DEBUG nova.network.neutron [-] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2047.523890] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7e2461ec-a06f-4662-8673-cce9af8aeec6 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.533750] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6e3d69-19fe-49c3-857e-a52b284f1794 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.557936] env[69992]: DEBUG nova.compute.manager [req-011ae485-cbda-4a9b-aea6-e8499728fcf5 req-a02bba9a-e588-47b0-ba70-a6f477d3871d service nova] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Detach interface failed, port_id=2ed16f5a-d5d8-447c-aaa2-c02231e5af66, reason: Instance db23bdaa-b33d-4897-99ce-cdaa41a1758e could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 2048.011035] env[69992]: INFO nova.compute.manager [-] [instance: db23bdaa-b33d-4897-99ce-cdaa41a1758e] Took 1.44 seconds to deallocate network for instance. 
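The power-off, unregister, and datastore-file deletion steps above all show the same oslo.vmware pattern that produces the repeated "Waiting for the task ... progress is 0% ... completed successfully" entries: the driver invokes a vCenter method that returns a task, then polls that task until it reaches a terminal state (wait_for_task at oslo_vmware/api.py:397 and _poll_task at api.py:434 in the trace). Below is a minimal illustrative sketch of that polling loop, not the oslo.vmware implementation; the poll_vcenter_task helper, the get_task_info callable, and the 0.5 s interval are assumptions, and the task object is assumed to expose state, progress, and error the way the vSphere TaskInfo structure does.

# Illustrative sketch only: mirrors the "Waiting for the task ...",
# "... progress is N%." and "... completed successfully." entries above.
# Not the oslo.vmware code; the helper name and poll interval are assumed.
import time


class VcenterTaskFailed(Exception):
    """Raised when a vCenter task finishes in the 'error' state."""


def poll_vcenter_task(get_task_info, task_id, interval=0.5):
    """Poll a vCenter task until it succeeds or fails.

    get_task_info: callable returning an object with .state, .progress
    and .error attributes, mirroring the vSphere TaskInfo structure.
    """
    while True:
        info = get_task_info(task_id)
        if info.state == "success":
            # Corresponds to entries such as
            # "Task: {'id': task-2898377, 'name': PowerOffVM_Task,
            #  'duration_secs': 0.199253} completed successfully."
            return info
        if info.state == "error":
            raise VcenterTaskFailed(str(info.error))
        # Corresponds to the intermediate
        # "Task: {'id': ..., 'name': ...} progress is 0%." entries.
        print("Task %s progress is %s%%" % (task_id, info.progress or 0))
        time.sleep(interval)

In the trace above this loop accounts for PowerOffVM_Task (0.199 s) and DeleteDatastoreFile_Task (0.149 s), and the same pattern recurs later for CreateVM_Task, CopyVirtualDisk_Task and ExtendVirtualDisk_Task.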
[ 2048.517207] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2048.517574] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2048.517707] env[69992]: DEBUG nova.objects.instance [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lazy-loading 'resources' on Instance uuid db23bdaa-b33d-4897-99ce-cdaa41a1758e {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2049.050977] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e02e26a-4302-49b0-b8aa-69a5185caa94 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.058562] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a03971c-03cb-4292-bded-a61cbf8e10e9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.088230] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a3ba0cd-fc94-415d-a90b-2bec843dd492 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.095160] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ddaec40-b140-4533-a12b-d3a296974f5c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.107715] env[69992]: DEBUG nova.compute.provider_tree [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2049.610871] env[69992]: DEBUG nova.scheduler.client.report [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2050.116075] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 
tempest-AttachVolumeTestJSON-502636254-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.598s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2050.135246] env[69992]: INFO nova.scheduler.client.report [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Deleted allocations for instance db23bdaa-b33d-4897-99ce-cdaa41a1758e [ 2050.643674] env[69992]: DEBUG oslo_concurrency.lockutils [None req-1fa85482-c0ce-417f-a2ee-728fc84f6511 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "db23bdaa-b33d-4897-99ce-cdaa41a1758e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.696s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2052.819882] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "e9edec84-ade9-4eeb-88f3-5180d64af400" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2052.820204] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "e9edec84-ade9-4eeb-88f3-5180d64af400" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2053.322501] env[69992]: DEBUG nova.compute.manager [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Starting instance... 
{{(pid=69992) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2053.843506] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2053.843799] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2053.845289] env[69992]: INFO nova.compute.claims [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2054.883960] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0fffdc1-c92b-4469-9ec0-360c8afb5ee8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.891324] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a7fbbc-517f-49f8-9a74-6732a62b1660 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.920169] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee77d843-b0c8-4792-9634-e6f6ece5629a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.927204] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904bb08f-5da2-498c-bed4-55c5defebd2d {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.939782] env[69992]: DEBUG nova.compute.provider_tree [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2055.443154] env[69992]: DEBUG nova.scheduler.client.report [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2055.948434] env[69992]: DEBUG oslo_concurrency.lockutils [None 
req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.104s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2055.948959] env[69992]: DEBUG nova.compute.manager [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Start building networks asynchronously for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2056.453906] env[69992]: DEBUG nova.compute.utils [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 2056.455340] env[69992]: DEBUG nova.compute.manager [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Allocating IP information in the background. {{(pid=69992) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2056.455513] env[69992]: DEBUG nova.network.neutron [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] allocate_for_instance() {{(pid=69992) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2056.504389] env[69992]: DEBUG nova.policy [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3aee5449aa2b43f9b852cae59e547f39', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b7e8c96af882413d886198a7b64b474f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69992) authorize /opt/stack/nova/nova/policy.py:192}} [ 2056.777893] env[69992]: DEBUG nova.network.neutron [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Successfully created port: 1b80eab6-fa50-4289-9c37-aceac0cee737 {{(pid=69992) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2056.959742] env[69992]: DEBUG nova.compute.manager [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Start building block device mappings for instance. {{(pid=69992) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2057.970592] env[69992]: DEBUG nova.compute.manager [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Start spawning the instance on the hypervisor. 
{{(pid=69992) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2057.996869] env[69992]: DEBUG nova.virt.hardware [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-10T17:44:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-10T17:43:45Z,direct_url=,disk_format='vmdk',id=eb50549f-9db8-4c15-a738-0e4b1e9e33fb,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='8737009d8272416b9d9df3315d20a145',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-10T17:43:46Z,virtual_size=,visibility=), allow threads: False {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2057.997134] env[69992]: DEBUG nova.virt.hardware [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Flavor limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2057.997303] env[69992]: DEBUG nova.virt.hardware [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Image limits 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2057.997488] env[69992]: DEBUG nova.virt.hardware [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Flavor pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2057.997635] env[69992]: DEBUG nova.virt.hardware [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Image pref 0:0:0 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2057.997780] env[69992]: DEBUG nova.virt.hardware [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69992) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2057.997987] env[69992]: DEBUG nova.virt.hardware [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2057.998178] env[69992]: DEBUG nova.virt.hardware [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2057.998352] env[69992]: DEBUG nova.virt.hardware [None 
req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Got 1 possible topologies {{(pid=69992) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2057.998515] env[69992]: DEBUG nova.virt.hardware [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2057.998686] env[69992]: DEBUG nova.virt.hardware [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69992) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2057.999573] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edad0238-6244-4bb9-a606-96826ccb81e3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.007667] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9a5246-3837-4af3-b67e-d210096ed4e8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.140741] env[69992]: DEBUG nova.compute.manager [req-6b970982-de21-4c7f-96a4-b95b60d491a0 req-beeed631-98a1-4792-ac8a-d08e7778db19 service nova] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Received event network-vif-plugged-1b80eab6-fa50-4289-9c37-aceac0cee737 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 2058.140971] env[69992]: DEBUG oslo_concurrency.lockutils [req-6b970982-de21-4c7f-96a4-b95b60d491a0 req-beeed631-98a1-4792-ac8a-d08e7778db19 service nova] Acquiring lock "e9edec84-ade9-4eeb-88f3-5180d64af400-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2058.141278] env[69992]: DEBUG oslo_concurrency.lockutils [req-6b970982-de21-4c7f-96a4-b95b60d491a0 req-beeed631-98a1-4792-ac8a-d08e7778db19 service nova] Lock "e9edec84-ade9-4eeb-88f3-5180d64af400-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2058.141432] env[69992]: DEBUG oslo_concurrency.lockutils [req-6b970982-de21-4c7f-96a4-b95b60d491a0 req-beeed631-98a1-4792-ac8a-d08e7778db19 service nova] Lock "e9edec84-ade9-4eeb-88f3-5180d64af400-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2058.141553] env[69992]: DEBUG nova.compute.manager [req-6b970982-de21-4c7f-96a4-b95b60d491a0 req-beeed631-98a1-4792-ac8a-d08e7778db19 service nova] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] No waiting events found dispatching network-vif-plugged-1b80eab6-fa50-4289-9c37-aceac0cee737 {{(pid=69992) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2058.141708] env[69992]: WARNING nova.compute.manager [req-6b970982-de21-4c7f-96a4-b95b60d491a0 
req-beeed631-98a1-4792-ac8a-d08e7778db19 service nova] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Received unexpected event network-vif-plugged-1b80eab6-fa50-4289-9c37-aceac0cee737 for instance with vm_state building and task_state spawning. [ 2058.218316] env[69992]: DEBUG nova.network.neutron [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Successfully updated port: 1b80eab6-fa50-4289-9c37-aceac0cee737 {{(pid=69992) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2058.609914] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2058.610186] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2058.721071] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "refresh_cache-e9edec84-ade9-4eeb-88f3-5180d64af400" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2058.721241] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquired lock "refresh_cache-e9edec84-ade9-4eeb-88f3-5180d64af400" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2058.721402] env[69992]: DEBUG nova.network.neutron [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Building network info cache for instance {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2059.113299] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2059.113594] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2059.113750] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2059.113912] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Auditing locally available compute resources for cpu-1 
(node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69992) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2059.114792] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab67478-1747-421d-8230-4ac4796fd308 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.122876] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0f5fd01-4e68-4668-acdd-df6b0f812d99 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.135902] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f805db-67fb-4dcb-99f7-2ab80a7069fb {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.141653] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad015162-d08c-474d-aecb-e58c064599b0 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.170604] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180947MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=69992) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2059.170745] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2059.170942] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2059.251777] env[69992]: DEBUG nova.network.neutron [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Instance cache missing network info. 
{{(pid=69992) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2059.362992] env[69992]: DEBUG nova.network.neutron [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Updating instance_info_cache with network_info: [{"id": "1b80eab6-fa50-4289-9c37-aceac0cee737", "address": "fa:16:3e:38:fe:5d", "network": {"id": "66e306e0-b31e-4353-94ff-5022400c5f7e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1250291633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7e8c96af882413d886198a7b64b474f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b80eab6-fa", "ovs_interfaceid": "1b80eab6-fa50-4289-9c37-aceac0cee737", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2059.865550] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Releasing lock "refresh_cache-e9edec84-ade9-4eeb-88f3-5180d64af400" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2059.865889] env[69992]: DEBUG nova.compute.manager [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Instance network_info: |[{"id": "1b80eab6-fa50-4289-9c37-aceac0cee737", "address": "fa:16:3e:38:fe:5d", "network": {"id": "66e306e0-b31e-4353-94ff-5022400c5f7e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1250291633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7e8c96af882413d886198a7b64b474f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b80eab6-fa", "ovs_interfaceid": "1b80eab6-fa50-4289-9c37-aceac0cee737", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69992) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 2059.866333] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:fe:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe20ef0e-0991-44d7-887d-08dddac0b56b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1b80eab6-fa50-4289-9c37-aceac0cee737', 'vif_model': 'vmxnet3'}] {{(pid=69992) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2059.873724] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 2059.873924] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Creating VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2059.874184] env[69992]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e793207-fbae-4e6e-827e-f894cf697ded {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.893844] env[69992]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2059.893844] env[69992]: value = "task-2898380" [ 2059.893844] env[69992]: _type = "Task" [ 2059.893844] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.905406] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898380, 'name': CreateVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.167362] env[69992]: DEBUG nova.compute.manager [req-743b9d53-e547-410e-8d4c-18bac5135d3b req-bd24e5ca-4857-42e7-afd3-2a851152de16 service nova] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Received event network-changed-1b80eab6-fa50-4289-9c37-aceac0cee737 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 2060.167639] env[69992]: DEBUG nova.compute.manager [req-743b9d53-e547-410e-8d4c-18bac5135d3b req-bd24e5ca-4857-42e7-afd3-2a851152de16 service nova] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Refreshing instance network info cache due to event network-changed-1b80eab6-fa50-4289-9c37-aceac0cee737. 
{{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 2060.167889] env[69992]: DEBUG oslo_concurrency.lockutils [req-743b9d53-e547-410e-8d4c-18bac5135d3b req-bd24e5ca-4857-42e7-afd3-2a851152de16 service nova] Acquiring lock "refresh_cache-e9edec84-ade9-4eeb-88f3-5180d64af400" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2060.168077] env[69992]: DEBUG oslo_concurrency.lockutils [req-743b9d53-e547-410e-8d4c-18bac5135d3b req-bd24e5ca-4857-42e7-afd3-2a851152de16 service nova] Acquired lock "refresh_cache-e9edec84-ade9-4eeb-88f3-5180d64af400" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2060.168278] env[69992]: DEBUG nova.network.neutron [req-743b9d53-e547-410e-8d4c-18bac5135d3b req-bd24e5ca-4857-42e7-afd3-2a851152de16 service nova] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Refreshing network info cache for port 1b80eab6-fa50-4289-9c37-aceac0cee737 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2060.193970] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance e9edec84-ade9-4eeb-88f3-5180d64af400 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2060.194179] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2060.194327] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2060.220140] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a16d8e-f837-4cfd-a40f-ccdb0f373df3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.227464] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb063f06-c927-416d-bbe9-29a58f63b822 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.258465] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41c8f55-9d1c-4e27-b35f-19cbb99dba86 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.265458] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f6671d4-e2f9-415f-b973-19b7dd801bca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.278179] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2060.403813] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898380, 'name': CreateVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.781369] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2060.850635] env[69992]: DEBUG nova.network.neutron [req-743b9d53-e547-410e-8d4c-18bac5135d3b req-bd24e5ca-4857-42e7-afd3-2a851152de16 service nova] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Updated VIF entry in instance network info cache for port 1b80eab6-fa50-4289-9c37-aceac0cee737. {{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2060.851240] env[69992]: DEBUG nova.network.neutron [req-743b9d53-e547-410e-8d4c-18bac5135d3b req-bd24e5ca-4857-42e7-afd3-2a851152de16 service nova] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Updating instance_info_cache with network_info: [{"id": "1b80eab6-fa50-4289-9c37-aceac0cee737", "address": "fa:16:3e:38:fe:5d", "network": {"id": "66e306e0-b31e-4353-94ff-5022400c5f7e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1250291633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7e8c96af882413d886198a7b64b474f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b80eab6-fa", "ovs_interfaceid": "1b80eab6-fa50-4289-9c37-aceac0cee737", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2060.904384] env[69992]: DEBUG oslo_vmware.api [-] Task: {'id': task-2898380, 'name': CreateVM_Task, 'duration_secs': 0.535867} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.904543] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Created VM on the ESX host {{(pid=69992) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2060.911011] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2060.911221] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2060.911530] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 2060.911768] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8066955-a4bc-4a09-89c1-7261a835e8fa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.915816] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 2060.915816] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]520fed4e-dd43-e6a7-f211-b79142c10868" [ 2060.915816] env[69992]: _type = "Task" [ 2060.915816] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.922923] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]520fed4e-dd43-e6a7-f211-b79142c10868, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.285854] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69992) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2061.286235] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.115s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2061.353912] env[69992]: DEBUG oslo_concurrency.lockutils [req-743b9d53-e547-410e-8d4c-18bac5135d3b req-bd24e5ca-4857-42e7-afd3-2a851152de16 service nova] Releasing lock "refresh_cache-e9edec84-ade9-4eeb-88f3-5180d64af400" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2061.426992] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]520fed4e-dd43-e6a7-f211-b79142c10868, 'name': SearchDatastore_Task, 'duration_secs': 0.010474} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.427287] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2061.427519] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Processing image eb50549f-9db8-4c15-a738-0e4b1e9e33fb {{(pid=69992) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2061.427745] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2061.427891] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquired lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2061.428082] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2061.428336] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cebebc68-6090-4eda-bd72-abed0be8e402 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.436427] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69992) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2061.436602] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69992) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2061.437267] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-824366b1-9210-49d4-a688-3af05894f1b9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.441730] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 2061.441730] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]525c26a9-bcf0-40ab-2574-2c86e898d401" [ 2061.441730] env[69992]: _type = "Task" [ 2061.441730] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.448608] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525c26a9-bcf0-40ab-2574-2c86e898d401, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.952356] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]525c26a9-bcf0-40ab-2574-2c86e898d401, 'name': SearchDatastore_Task, 'duration_secs': 0.008673} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.953140] env[69992]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8bfce49-9702-4870-bef1-864a1150344c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.957979] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 2061.957979] env[69992]: value = "session[528eb7b7-6862-86e5-2686-6146916c3c70]523aad44-2b3d-df22-d4f7-9d302828652d" [ 2061.957979] env[69992]: _type = "Task" [ 2061.957979] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.966637] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523aad44-2b3d-df22-d4f7-9d302828652d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.285317] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2062.285566] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2062.285740] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2062.468553] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': session[528eb7b7-6862-86e5-2686-6146916c3c70]523aad44-2b3d-df22-d4f7-9d302828652d, 'name': SearchDatastore_Task, 'duration_secs': 0.009852} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.468934] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Releasing lock "[datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2062.469068] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] e9edec84-ade9-4eeb-88f3-5180d64af400/e9edec84-ade9-4eeb-88f3-5180d64af400.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2062.469295] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e72faa24-11b1-4529-ad48-1ca1ae7cecdc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.475933] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 2062.475933] env[69992]: value = "task-2898381" [ 2062.475933] env[69992]: _type = "Task" [ 2062.475933] env[69992]: } to complete. 
{{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.483493] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898381, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.609388] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2062.986058] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898381, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.420764} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.986058] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/eb50549f-9db8-4c15-a738-0e4b1e9e33fb/eb50549f-9db8-4c15-a738-0e4b1e9e33fb.vmdk to [datastore2] e9edec84-ade9-4eeb-88f3-5180d64af400/e9edec84-ade9-4eeb-88f3-5180d64af400.vmdk {{(pid=69992) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2062.986244] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Extending root virtual disk to 1048576 {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2062.986483] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aeff5102-d0c3-4ed7-91cb-b876391ba472 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.993313] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 2062.993313] env[69992]: value = "task-2898382" [ 2062.993313] env[69992]: _type = "Task" [ 2062.993313] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.000141] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898382, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.502704] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898382, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0672} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.503108] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Extended root virtual disk {{(pid=69992) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2063.503725] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d8692fa-0774-4e68-9edc-28553e5ec0c7 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.524812] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Reconfiguring VM instance instance-00000080 to attach disk [datastore2] e9edec84-ade9-4eeb-88f3-5180d64af400/e9edec84-ade9-4eeb-88f3-5180d64af400.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2063.525057] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb38706b-d566-430c-a027-8764318604dc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.543741] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 2063.543741] env[69992]: value = "task-2898383" [ 2063.543741] env[69992]: _type = "Task" [ 2063.543741] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.550817] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898383, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.053668] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898383, 'name': ReconfigVM_Task, 'duration_secs': 0.273387} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2064.053913] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Reconfigured VM instance instance-00000080 to attach disk [datastore2] e9edec84-ade9-4eeb-88f3-5180d64af400/e9edec84-ade9-4eeb-88f3-5180d64af400.vmdk or device None with type sparse {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2064.054570] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8650ecda-b619-42b3-9ba8-ffaf1f424623 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.060656] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 2064.060656] env[69992]: value = "task-2898384" [ 2064.060656] env[69992]: _type = "Task" [ 2064.060656] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.067953] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898384, 'name': Rename_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.570556] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898384, 'name': Rename_Task, 'duration_secs': 0.146525} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2064.570909] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Powering on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2064.571075] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-545493e0-e534-42d1-81b0-bcafd5c80290 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.577611] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 2064.577611] env[69992]: value = "task-2898385" [ 2064.577611] env[69992]: _type = "Task" [ 2064.577611] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.584598] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898385, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.087567] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898385, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.588183] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898385, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.088800] env[69992]: DEBUG oslo_vmware.api [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898385, 'name': PowerOnVM_Task, 'duration_secs': 1.018524} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.089129] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Powered on the VM {{(pid=69992) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2066.089375] env[69992]: INFO nova.compute.manager [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Took 8.12 seconds to spawn the instance on the hypervisor. [ 2066.089572] env[69992]: DEBUG nova.compute.manager [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Checking state {{(pid=69992) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2066.090356] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f9c1b2-9d89-4262-b133-b2a200b0d654 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.607651] env[69992]: INFO nova.compute.manager [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Took 12.78 seconds to build instance. [ 2066.608849] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2066.609015] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69992) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 2066.823231] env[69992]: DEBUG nova.compute.manager [req-eaa59337-9e75-4ecc-abd3-7a8a69031f30 req-8ae3d332-ae50-426e-883d-b366ff5cd8ff service nova] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Received event network-changed-1b80eab6-fa50-4289-9c37-aceac0cee737 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 2066.823438] env[69992]: DEBUG nova.compute.manager [req-eaa59337-9e75-4ecc-abd3-7a8a69031f30 req-8ae3d332-ae50-426e-883d-b366ff5cd8ff service nova] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Refreshing instance network info cache due to event network-changed-1b80eab6-fa50-4289-9c37-aceac0cee737. {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 2066.823653] env[69992]: DEBUG oslo_concurrency.lockutils [req-eaa59337-9e75-4ecc-abd3-7a8a69031f30 req-8ae3d332-ae50-426e-883d-b366ff5cd8ff service nova] Acquiring lock "refresh_cache-e9edec84-ade9-4eeb-88f3-5180d64af400" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2066.823817] env[69992]: DEBUG oslo_concurrency.lockutils [req-eaa59337-9e75-4ecc-abd3-7a8a69031f30 req-8ae3d332-ae50-426e-883d-b366ff5cd8ff service nova] Acquired lock "refresh_cache-e9edec84-ade9-4eeb-88f3-5180d64af400" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2066.823998] env[69992]: DEBUG nova.network.neutron [req-eaa59337-9e75-4ecc-abd3-7a8a69031f30 req-8ae3d332-ae50-426e-883d-b366ff5cd8ff service nova] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Refreshing network info cache for port 1b80eab6-fa50-4289-9c37-aceac0cee737 {{(pid=69992) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2067.110327] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fe5d5e3b-3818-445b-9b6a-1357a61f94e0 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "e9edec84-ade9-4eeb-88f3-5180d64af400" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.290s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2067.526669] env[69992]: DEBUG nova.network.neutron [req-eaa59337-9e75-4ecc-abd3-7a8a69031f30 req-8ae3d332-ae50-426e-883d-b366ff5cd8ff service nova] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Updated VIF entry in instance network info cache for port 1b80eab6-fa50-4289-9c37-aceac0cee737. 
{{(pid=69992) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2067.527045] env[69992]: DEBUG nova.network.neutron [req-eaa59337-9e75-4ecc-abd3-7a8a69031f30 req-8ae3d332-ae50-426e-883d-b366ff5cd8ff service nova] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Updating instance_info_cache with network_info: [{"id": "1b80eab6-fa50-4289-9c37-aceac0cee737", "address": "fa:16:3e:38:fe:5d", "network": {"id": "66e306e0-b31e-4353-94ff-5022400c5f7e", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1250291633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.186", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7e8c96af882413d886198a7b64b474f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b80eab6-fa", "ovs_interfaceid": "1b80eab6-fa50-4289-9c37-aceac0cee737", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2068.030344] env[69992]: DEBUG oslo_concurrency.lockutils [req-eaa59337-9e75-4ecc-abd3-7a8a69031f30 req-8ae3d332-ae50-426e-883d-b366ff5cd8ff service nova] Releasing lock "refresh_cache-e9edec84-ade9-4eeb-88f3-5180d64af400" {{(pid=69992) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2069.609409] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2103.925585] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4a9d1da0-caf2-402d-ab0b-fb241bf861c2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "e9edec84-ade9-4eeb-88f3-5180d64af400" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2103.926172] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4a9d1da0-caf2-402d-ab0b-fb241bf861c2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "e9edec84-ade9-4eeb-88f3-5180d64af400" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2104.429398] env[69992]: DEBUG nova.compute.utils [None req-4a9d1da0-caf2-402d-ab0b-fb241bf861c2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 2104.932855] env[69992]: DEBUG 
oslo_concurrency.lockutils [None req-4a9d1da0-caf2-402d-ab0b-fb241bf861c2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "e9edec84-ade9-4eeb-88f3-5180d64af400" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2105.992847] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4a9d1da0-caf2-402d-ab0b-fb241bf861c2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "e9edec84-ade9-4eeb-88f3-5180d64af400" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2105.993247] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4a9d1da0-caf2-402d-ab0b-fb241bf861c2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "e9edec84-ade9-4eeb-88f3-5180d64af400" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2105.993474] env[69992]: INFO nova.compute.manager [None req-4a9d1da0-caf2-402d-ab0b-fb241bf861c2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Attaching volume e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092 to /dev/sdb [ 2106.024132] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6638a90-58cd-4256-b2b3-125e0352096b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.030990] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d95416-63f6-4c6e-9d3a-b219883aab3c {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.044336] env[69992]: DEBUG nova.virt.block_device [None req-4a9d1da0-caf2-402d-ab0b-fb241bf861c2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Updating existing volume attachment record: a1b95a55-d04f-4818-a8ae-079f8ef4a1eb {{(pid=69992) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2110.587421] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a9d1da0-caf2-402d-ab0b-fb241bf861c2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Volume attach. 
Driver type: vmdk {{(pid=69992) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2110.587677] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a9d1da0-caf2-402d-ab0b-fb241bf861c2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582170', 'volume_id': 'e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092', 'name': 'volume-e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e9edec84-ade9-4eeb-88f3-5180d64af400', 'attached_at': '', 'detached_at': '', 'volume_id': 'e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092', 'serial': 'e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2110.588571] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d416aa86-d997-4cda-932f-ce31241f1787 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.604674] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af94fdf0-a44d-430c-9f74-ac63b8a93a3e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.628310] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a9d1da0-caf2-402d-ab0b-fb241bf861c2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Reconfiguring VM instance instance-00000080 to attach disk [datastore1] volume-e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092/volume-e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2110.628564] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c5522fd-9c23-4f9d-9a71-8d0e98c6a0f4 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.646015] env[69992]: DEBUG oslo_vmware.api [None req-4a9d1da0-caf2-402d-ab0b-fb241bf861c2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 2110.646015] env[69992]: value = "task-2898388" [ 2110.646015] env[69992]: _type = "Task" [ 2110.646015] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2110.654313] env[69992]: DEBUG oslo_vmware.api [None req-4a9d1da0-caf2-402d-ab0b-fb241bf861c2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898388, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.155951] env[69992]: DEBUG oslo_vmware.api [None req-4a9d1da0-caf2-402d-ab0b-fb241bf861c2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898388, 'name': ReconfigVM_Task, 'duration_secs': 0.32825} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2111.156236] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a9d1da0-caf2-402d-ab0b-fb241bf861c2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Reconfigured VM instance instance-00000080 to attach disk [datastore1] volume-e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092/volume-e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2111.160836] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a16226d-6884-404b-8da0-304c2e52140e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.175514] env[69992]: DEBUG oslo_vmware.api [None req-4a9d1da0-caf2-402d-ab0b-fb241bf861c2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 2111.175514] env[69992]: value = "task-2898389" [ 2111.175514] env[69992]: _type = "Task" [ 2111.175514] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.182782] env[69992]: DEBUG oslo_vmware.api [None req-4a9d1da0-caf2-402d-ab0b-fb241bf861c2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898389, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.685380] env[69992]: DEBUG oslo_vmware.api [None req-4a9d1da0-caf2-402d-ab0b-fb241bf861c2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898389, 'name': ReconfigVM_Task, 'duration_secs': 0.183011} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2111.685700] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a9d1da0-caf2-402d-ab0b-fb241bf861c2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582170', 'volume_id': 'e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092', 'name': 'volume-e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e9edec84-ade9-4eeb-88f3-5180d64af400', 'attached_at': '', 'detached_at': '', 'volume_id': 'e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092', 'serial': 'e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2112.720738] env[69992]: DEBUG nova.objects.instance [None req-4a9d1da0-caf2-402d-ab0b-fb241bf861c2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lazy-loading 'flavor' on Instance uuid e9edec84-ade9-4eeb-88f3-5180d64af400 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2113.227195] env[69992]: DEBUG oslo_concurrency.lockutils [None req-4a9d1da0-caf2-402d-ab0b-fb241bf861c2 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "e9edec84-ade9-4eeb-88f3-5180d64af400" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.234s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2114.032707] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2319f3b9-d9d1-45ae-b994-3de4005d1031 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "e9edec84-ade9-4eeb-88f3-5180d64af400" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2114.032986] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2319f3b9-d9d1-45ae-b994-3de4005d1031 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "e9edec84-ade9-4eeb-88f3-5180d64af400" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2114.536242] env[69992]: DEBUG nova.compute.utils [None req-2319f3b9-d9d1-45ae-b994-3de4005d1031 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Using /dev/sd instead of None {{(pid=69992) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 2115.039421] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2319f3b9-d9d1-45ae-b994-3de4005d1031 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "e9edec84-ade9-4eeb-88f3-5180d64af400" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2116.092610] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2319f3b9-d9d1-45ae-b994-3de4005d1031 
tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "e9edec84-ade9-4eeb-88f3-5180d64af400" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2116.093034] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2319f3b9-d9d1-45ae-b994-3de4005d1031 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "e9edec84-ade9-4eeb-88f3-5180d64af400" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2116.093127] env[69992]: INFO nova.compute.manager [None req-2319f3b9-d9d1-45ae-b994-3de4005d1031 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Attaching volume 221cdd68-7594-4af3-9875-34a293b68723 to /dev/sdc [ 2116.123159] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f4ed72-2c82-46ca-9770-0a7775025cd8 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.130338] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-544d8b0c-34d7-412f-9d86-7361965c59b5 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.143459] env[69992]: DEBUG nova.virt.block_device [None req-2319f3b9-d9d1-45ae-b994-3de4005d1031 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Updating existing volume attachment record: 2011cd78-9aeb-44ea-bb51-b48d927889f3 {{(pid=69992) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2119.609280] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2119.609660] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2120.113181] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2120.113457] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2120.113638] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 
0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2120.113793] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69992) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2120.114689] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-675d1bca-c6c5-4271-86a8-5e52489a4b4e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.122886] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7a0321-5d42-45b8-ae95-b7e4ba1e9068 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.136663] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fac8ac1-0af2-4bda-a9c9-82e00af4910e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.142610] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726ba46c-bfe2-4557-894b-94ab1d9f91dc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.171713] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180900MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=69992) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2120.171853] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2120.172104] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2120.686264] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-2319f3b9-d9d1-45ae-b994-3de4005d1031 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Volume attach. 
Driver type: vmdk {{(pid=69992) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2120.686525] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-2319f3b9-d9d1-45ae-b994-3de4005d1031 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582171', 'volume_id': '221cdd68-7594-4af3-9875-34a293b68723', 'name': 'volume-221cdd68-7594-4af3-9875-34a293b68723', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e9edec84-ade9-4eeb-88f3-5180d64af400', 'attached_at': '', 'detached_at': '', 'volume_id': '221cdd68-7594-4af3-9875-34a293b68723', 'serial': '221cdd68-7594-4af3-9875-34a293b68723'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2120.687412] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2062022-6913-4a49-8a59-bf36fc88345a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.705068] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-546d26fc-b72f-493e-8d20-3de3b7c0f811 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.732159] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-2319f3b9-d9d1-45ae-b994-3de4005d1031 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Reconfiguring VM instance instance-00000080 to attach disk [datastore1] volume-221cdd68-7594-4af3-9875-34a293b68723/volume-221cdd68-7594-4af3-9875-34a293b68723.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2120.732384] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6090a7b-4aeb-4b83-9265-80390997ee51 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.749899] env[69992]: DEBUG oslo_vmware.api [None req-2319f3b9-d9d1-45ae-b994-3de4005d1031 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 2120.749899] env[69992]: value = "task-2898392" [ 2120.749899] env[69992]: _type = "Task" [ 2120.749899] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.757396] env[69992]: DEBUG oslo_vmware.api [None req-2319f3b9-d9d1-45ae-b994-3de4005d1031 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898392, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.196972] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Instance e9edec84-ade9-4eeb-88f3-5180d64af400 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69992) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2121.197594] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2121.197594] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=69992) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2121.222290] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b76afe5-9f56-42ca-b398-888063474bfc {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.229591] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b6b1ca-16f5-44d5-927e-a2d5619d76d1 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.262527] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57b987bb-d737-493f-8675-a72082378e30 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.269006] env[69992]: DEBUG oslo_vmware.api [None req-2319f3b9-d9d1-45ae-b994-3de4005d1031 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898392, 'name': ReconfigVM_Task, 'duration_secs': 0.329216} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.270911] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-2319f3b9-d9d1-45ae-b994-3de4005d1031 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Reconfigured VM instance instance-00000080 to attach disk [datastore1] volume-221cdd68-7594-4af3-9875-34a293b68723/volume-221cdd68-7594-4af3-9875-34a293b68723.vmdk or device None with type thin {{(pid=69992) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2121.275498] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-730ed0bb-d981-4a80-b0c1-1e02a5945aaa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.285591] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ed813c-8443-49ed-9828-eb2a877392da {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.299135] env[69992]: DEBUG nova.compute.provider_tree [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2121.301295] env[69992]: DEBUG oslo_vmware.api [None req-2319f3b9-d9d1-45ae-b994-3de4005d1031 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 2121.301295] env[69992]: value = "task-2898393" [ 2121.301295] env[69992]: _type = "Task" [ 2121.301295] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.309626] env[69992]: DEBUG oslo_vmware.api [None req-2319f3b9-d9d1-45ae-b994-3de4005d1031 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898393, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.806170] env[69992]: DEBUG nova.scheduler.client.report [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2121.815008] env[69992]: DEBUG oslo_vmware.api [None req-2319f3b9-d9d1-45ae-b994-3de4005d1031 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898393, 'name': ReconfigVM_Task, 'duration_secs': 0.146873} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.815305] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-2319f3b9-d9d1-45ae-b994-3de4005d1031 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582171', 'volume_id': '221cdd68-7594-4af3-9875-34a293b68723', 'name': 'volume-221cdd68-7594-4af3-9875-34a293b68723', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e9edec84-ade9-4eeb-88f3-5180d64af400', 'attached_at': '', 'detached_at': '', 'volume_id': '221cdd68-7594-4af3-9875-34a293b68723', 'serial': '221cdd68-7594-4af3-9875-34a293b68723'} {{(pid=69992) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2122.311311] env[69992]: DEBUG nova.compute.resource_tracker [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69992) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2122.311568] env[69992]: DEBUG oslo_concurrency.lockutils [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.139s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2122.851535] env[69992]: DEBUG nova.objects.instance [None req-2319f3b9-d9d1-45ae-b994-3de4005d1031 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lazy-loading 'flavor' on Instance uuid e9edec84-ade9-4eeb-88f3-5180d64af400 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2123.312073] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2123.312289] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2123.357250] env[69992]: DEBUG oslo_concurrency.lockutils [None req-2319f3b9-d9d1-45ae-b994-3de4005d1031 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "e9edec84-ade9-4eeb-88f3-5180d64af400" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.264s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2123.605517] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2123.609170] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69992) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2123.622757] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fc682eb4-ee79-46ff-a26e-017f25c1d661 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "e9edec84-ade9-4eeb-88f3-5180d64af400" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2123.622995] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fc682eb4-ee79-46ff-a26e-017f25c1d661 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "e9edec84-ade9-4eeb-88f3-5180d64af400" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2124.126928] env[69992]: INFO nova.compute.manager [None req-fc682eb4-ee79-46ff-a26e-017f25c1d661 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Detaching volume e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092 [ 2124.157049] env[69992]: INFO nova.virt.block_device [None req-fc682eb4-ee79-46ff-a26e-017f25c1d661 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Attempting to driver detach volume e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092 from mountpoint /dev/sdb [ 2124.157292] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc682eb4-ee79-46ff-a26e-017f25c1d661 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Volume detach. 
Driver type: vmdk {{(pid=69992) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2124.157476] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc682eb4-ee79-46ff-a26e-017f25c1d661 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582170', 'volume_id': 'e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092', 'name': 'volume-e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e9edec84-ade9-4eeb-88f3-5180d64af400', 'attached_at': '', 'detached_at': '', 'volume_id': 'e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092', 'serial': 'e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2124.158363] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8788619-2d4d-4baa-b60a-4e9b004fa056 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.181839] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b3861e-7c1b-4cc9-a26c-b43a3236d49a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.188101] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79eccdfd-fd81-48d9-9261-521f865c475e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.209661] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a464d90e-04b6-4ba8-9b44-1046d2bc2804 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.225215] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc682eb4-ee79-46ff-a26e-017f25c1d661 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] The volume has not been displaced from its original location: [datastore1] volume-e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092/volume-e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092.vmdk. No consolidation needed. 
{{(pid=69992) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2124.230177] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc682eb4-ee79-46ff-a26e-017f25c1d661 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Reconfiguring VM instance instance-00000080 to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2124.230408] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f9335b6-0a65-4052-9b79-194f30fd2262 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.247558] env[69992]: DEBUG oslo_vmware.api [None req-fc682eb4-ee79-46ff-a26e-017f25c1d661 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 2124.247558] env[69992]: value = "task-2898394" [ 2124.247558] env[69992]: _type = "Task" [ 2124.247558] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2124.254509] env[69992]: DEBUG oslo_vmware.api [None req-fc682eb4-ee79-46ff-a26e-017f25c1d661 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898394, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.757818] env[69992]: DEBUG oslo_vmware.api [None req-fc682eb4-ee79-46ff-a26e-017f25c1d661 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898394, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.257796] env[69992]: DEBUG oslo_vmware.api [None req-fc682eb4-ee79-46ff-a26e-017f25c1d661 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898394, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.758690] env[69992]: DEBUG oslo_vmware.api [None req-fc682eb4-ee79-46ff-a26e-017f25c1d661 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898394, 'name': ReconfigVM_Task, 'duration_secs': 1.226445} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2125.758953] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc682eb4-ee79-46ff-a26e-017f25c1d661 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Reconfigured VM instance instance-00000080 to detach disk 2001 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2125.763493] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9a7271c-74c8-4fcf-9ca2-918fb431ebfa {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.777931] env[69992]: DEBUG oslo_vmware.api [None req-fc682eb4-ee79-46ff-a26e-017f25c1d661 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 2125.777931] env[69992]: value = "task-2898395" [ 2125.777931] env[69992]: _type = "Task" [ 2125.777931] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2125.785181] env[69992]: DEBUG oslo_vmware.api [None req-fc682eb4-ee79-46ff-a26e-017f25c1d661 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898395, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2126.287632] env[69992]: DEBUG oslo_vmware.api [None req-fc682eb4-ee79-46ff-a26e-017f25c1d661 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898395, 'name': ReconfigVM_Task, 'duration_secs': 0.145366} completed successfully. 
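"Reconfiguring VM instance instance-00000080 to detach disk 2001" and the task polling above reduce to a single ReconfigVM_Task whose spec removes one virtual device, followed by wait_for_task(). Roughly, continuing the illustrative sketch above (session and vm_ref as before; disk_device is a placeholder for the VirtualDisk picked out of the device list):

    # Build a reconfigure spec that removes the attached VirtualDisk; the suds
    # client factory creates the SOAP data objects sent to vCenter.
    client_factory = session.vim.client.factory
    config_spec = client_factory.create('ns0:VirtualMachineConfigSpec')
    device_spec = client_factory.create('ns0:VirtualDeviceConfigSpec')
    device_spec.operation = 'remove'    # detach only, keep the backing file
    device_spec.device = disk_device    # placeholder: the VirtualDisk to detach
    config_spec.deviceChange = [device_spec]

    # Submit the reconfigure and block until vCenter reports completion; the
    # "progress is N%" lines in this log come from this polling.
    task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                              vm_ref, spec=config_spec)
    session.wait_for_task(task)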
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2126.287943] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc682eb4-ee79-46ff-a26e-017f25c1d661 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582170', 'volume_id': 'e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092', 'name': 'volume-e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e9edec84-ade9-4eeb-88f3-5180d64af400', 'attached_at': '', 'detached_at': '', 'volume_id': 'e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092', 'serial': 'e965ff3e-e54b-4ff1-a8bd-3d2fa6c84092'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2126.829780] env[69992]: DEBUG nova.objects.instance [None req-fc682eb4-ee79-46ff-a26e-017f25c1d661 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lazy-loading 'flavor' on Instance uuid e9edec84-ade9-4eeb-88f3-5180d64af400 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2127.609073] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2127.609408] env[69992]: DEBUG nova.compute.manager [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] CONF.reclaim_instance_interval <= 0, skipping... 
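The periodic-task entry just above is the soft-delete guard: instances are only parked in SOFT_DELETED and reclaimed later when reclaim_instance_interval is a positive number of seconds; with the default of 0 the task logs the skip and deletes stay immediate. A self-contained paraphrase of that check (the option is registered here only so the snippet runs on its own; Nova registers it itself):

    from oslo_config import cfg

    CONF = cfg.CONF
    CONF.register_opts([cfg.IntOpt('reclaim_instance_interval', default=0)])

    def maybe_reclaim_queued_deletes():
        # Mirrors the guard behind "CONF.reclaim_instance_interval <= 0, skipping..."
        if CONF.reclaim_instance_interval <= 0:
            print('reclaim disabled, skipping')
            return
        # Otherwise soft-deleted instances older than the interval would be
        # looked up and really deleted here.

    maybe_reclaim_queued_deletes()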
{{(pid=69992) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 2127.836964] env[69992]: DEBUG oslo_concurrency.lockutils [None req-fc682eb4-ee79-46ff-a26e-017f25c1d661 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "e9edec84-ade9-4eeb-88f3-5180d64af400" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.214s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2127.881897] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6cb38fdf-e3d7-41c9-b6b8-9af1e460d689 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "e9edec84-ade9-4eeb-88f3-5180d64af400" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2127.882124] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6cb38fdf-e3d7-41c9-b6b8-9af1e460d689 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "e9edec84-ade9-4eeb-88f3-5180d64af400" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2128.385538] env[69992]: INFO nova.compute.manager [None req-6cb38fdf-e3d7-41c9-b6b8-9af1e460d689 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Detaching volume 221cdd68-7594-4af3-9875-34a293b68723 [ 2128.415034] env[69992]: INFO nova.virt.block_device [None req-6cb38fdf-e3d7-41c9-b6b8-9af1e460d689 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Attempting to driver detach volume 221cdd68-7594-4af3-9875-34a293b68723 from mountpoint /dev/sdc [ 2128.415034] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6cb38fdf-e3d7-41c9-b6b8-9af1e460d689 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Volume detach. 
Driver type: vmdk {{(pid=69992) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2128.415216] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6cb38fdf-e3d7-41c9-b6b8-9af1e460d689 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582171', 'volume_id': '221cdd68-7594-4af3-9875-34a293b68723', 'name': 'volume-221cdd68-7594-4af3-9875-34a293b68723', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e9edec84-ade9-4eeb-88f3-5180d64af400', 'attached_at': '', 'detached_at': '', 'volume_id': '221cdd68-7594-4af3-9875-34a293b68723', 'serial': '221cdd68-7594-4af3-9875-34a293b68723'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2128.416135] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb03400d-2fdb-41b3-a58c-29d1f5c15581 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.438935] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b3d3949-f3eb-4132-8cf5-8618777f0d0a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.445566] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be36c7b7-b647-420a-83dc-5f04d216ef47 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.464846] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ec535a-763b-40f7-9b1b-7cc4ad4bcda3 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.479618] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6cb38fdf-e3d7-41c9-b6b8-9af1e460d689 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] The volume has not been displaced from its original location: [datastore1] volume-221cdd68-7594-4af3-9875-34a293b68723/volume-221cdd68-7594-4af3-9875-34a293b68723.vmdk. No consolidation needed. 
{{(pid=69992) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2128.484765] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6cb38fdf-e3d7-41c9-b6b8-9af1e460d689 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Reconfiguring VM instance instance-00000080 to detach disk 2002 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2128.484982] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e444c93-ce34-4e13-9696-f8d9365df0db {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.501382] env[69992]: DEBUG oslo_vmware.api [None req-6cb38fdf-e3d7-41c9-b6b8-9af1e460d689 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 2128.501382] env[69992]: value = "task-2898396" [ 2128.501382] env[69992]: _type = "Task" [ 2128.501382] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.508382] env[69992]: DEBUG oslo_vmware.api [None req-6cb38fdf-e3d7-41c9-b6b8-9af1e460d689 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898396, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.011427] env[69992]: DEBUG oslo_vmware.api [None req-6cb38fdf-e3d7-41c9-b6b8-9af1e460d689 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898396, 'name': ReconfigVM_Task, 'duration_secs': 0.205619} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2129.011817] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6cb38fdf-e3d7-41c9-b6b8-9af1e460d689 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Reconfigured VM instance instance-00000080 to detach disk 2002 {{(pid=69992) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2129.016324] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9e15a47-4104-4b44-b7c2-5180104dc4ca {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.031132] env[69992]: DEBUG oslo_vmware.api [None req-6cb38fdf-e3d7-41c9-b6b8-9af1e460d689 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 2129.031132] env[69992]: value = "task-2898397" [ 2129.031132] env[69992]: _type = "Task" [ 2129.031132] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.038707] env[69992]: DEBUG oslo_vmware.api [None req-6cb38fdf-e3d7-41c9-b6b8-9af1e460d689 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898397, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.540552] env[69992]: DEBUG oslo_vmware.api [None req-6cb38fdf-e3d7-41c9-b6b8-9af1e460d689 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898397, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.040433] env[69992]: DEBUG oslo_vmware.api [None req-6cb38fdf-e3d7-41c9-b6b8-9af1e460d689 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898397, 'name': ReconfigVM_Task, 'duration_secs': 0.744594} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.040851] env[69992]: DEBUG nova.virt.vmwareapi.volumeops [None req-6cb38fdf-e3d7-41c9-b6b8-9af1e460d689 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-582171', 'volume_id': '221cdd68-7594-4af3-9875-34a293b68723', 'name': 'volume-221cdd68-7594-4af3-9875-34a293b68723', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e9edec84-ade9-4eeb-88f3-5180d64af400', 'attached_at': '', 'detached_at': '', 'volume_id': '221cdd68-7594-4af3-9875-34a293b68723', 'serial': '221cdd68-7594-4af3-9875-34a293b68723'} {{(pid=69992) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2130.580220] env[69992]: DEBUG nova.objects.instance [None req-6cb38fdf-e3d7-41c9-b6b8-9af1e460d689 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lazy-loading 'flavor' on Instance uuid e9edec84-ade9-4eeb-88f3-5180d64af400 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2131.588161] env[69992]: DEBUG oslo_concurrency.lockutils [None req-6cb38fdf-e3d7-41c9-b6b8-9af1e460d689 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "e9edec84-ade9-4eeb-88f3-5180d64af400" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.706s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2131.609382] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2132.250354] env[69992]: DEBUG oslo_concurrency.lockutils [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "e9edec84-ade9-4eeb-88f3-5180d64af400" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2132.250633] env[69992]: DEBUG oslo_concurrency.lockutils [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "e9edec84-ade9-4eeb-88f3-5180d64af400" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2132.250851] env[69992]: DEBUG oslo_concurrency.lockutils [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "e9edec84-ade9-4eeb-88f3-5180d64af400-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2132.251056] env[69992]: DEBUG oslo_concurrency.lockutils [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "e9edec84-ade9-4eeb-88f3-5180d64af400-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2132.251236] env[69992]: DEBUG oslo_concurrency.lockutils [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "e9edec84-ade9-4eeb-88f3-5180d64af400-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2132.253515] env[69992]: INFO nova.compute.manager [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Terminating instance [ 2132.757162] env[69992]: DEBUG nova.compute.manager [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Start destroying the instance on the hypervisor. 
{{(pid=69992) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2132.757450] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Destroying instance {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2132.758292] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f92dee-5af0-48bc-9328-8bb4f50a70ef {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.766116] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Powering off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2132.766349] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4aac1c65-613a-4770-b94c-43af72f5d96b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.772177] env[69992]: DEBUG oslo_vmware.api [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 2132.772177] env[69992]: value = "task-2898398" [ 2132.772177] env[69992]: _type = "Task" [ 2132.772177] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2132.779972] env[69992]: DEBUG oslo_vmware.api [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898398, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.282082] env[69992]: DEBUG oslo_vmware.api [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898398, 'name': PowerOffVM_Task, 'duration_secs': 0.190693} completed successfully. 
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2133.282342] env[69992]: DEBUG nova.virt.vmwareapi.vm_util [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Powered off the VM {{(pid=69992) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2133.282512] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Unregistering the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2133.282752] env[69992]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dfdb5e5f-920e-48ce-b056-63919fbe0995 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.350816] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Unregistered the VM {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2133.351068] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Deleting contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2133.351260] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Deleting the datastore file [datastore2] e9edec84-ade9-4eeb-88f3-5180d64af400 {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2133.351519] env[69992]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c18ef02-2759-4897-bf8f-41c3c54d02c9 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.358165] env[69992]: DEBUG oslo_vmware.api [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for the task: (returnval){ [ 2133.358165] env[69992]: value = "task-2898400" [ 2133.358165] env[69992]: _type = "Task" [ 2133.358165] env[69992]: } to complete. {{(pid=69992) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2133.365683] env[69992]: DEBUG oslo_vmware.api [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898400, 'name': DeleteDatastoreFile_Task} progress is 0%. 
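The termination above comes down to three vSphere calls in sequence: power the VM off, unregister it from the inventory, and delete its directory from the datastore. Sketched with the same illustrative oslo.vmware session as before (vm_ref and dc_ref, the datacenter moref, are placeholders; the datastore path is the one named in the log):

    # 1. Power off: a vCenter task, polled like the reconfigures above.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # 2. Unregister: removes the VM from the vCenter inventory without
    #    touching its files (not a task, so there is nothing to poll).
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # 3. Delete the instance directory from the datastore via the FileManager.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] e9edec84-ade9-4eeb-88f3-5180d64af400',
        datacenter=dc_ref)
    session.wait_for_task(task)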
{{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.867832] env[69992]: DEBUG oslo_vmware.api [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Task: {'id': task-2898400, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144532} completed successfully. {{(pid=69992) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2133.868193] env[69992]: DEBUG nova.virt.vmwareapi.ds_util [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Deleted the datastore file {{(pid=69992) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2133.868275] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Deleted contents of the VM from datastore datastore2 {{(pid=69992) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2133.868450] env[69992]: DEBUG nova.virt.vmwareapi.vmops [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Instance destroyed {{(pid=69992) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2133.868627] env[69992]: INFO nova.compute.manager [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Took 1.11 seconds to destroy the instance on the hypervisor. [ 2133.868870] env[69992]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
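The "Waiting for function ... _deallocate_network_with_retries to return" line is oslo.service's retry wrapper at work: network deallocation is retried with an increasing back-off if Neutron is temporarily unreachable. A small stand-alone illustration of the same decorator, with a placeholder body instead of Nova's real Neutron calls:

    from oslo_service import loopingcall

    # Retry up to three times, waiting a little longer after each failure
    # (capped at 30 seconds), but only for the listed exception types.
    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=30, exceptions=(IOError,))
    def deallocate_network_with_retries():
        # Placeholder: the real function unbinds and deletes the instance's ports.
        print('deallocating network')

    deallocate_network_with_retries()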
{{(pid=69992) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 2133.869084] env[69992]: DEBUG nova.compute.manager [-] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Deallocating network for instance {{(pid=69992) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2133.869178] env[69992]: DEBUG nova.network.neutron [-] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] deallocate_for_instance() {{(pid=69992) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2134.325758] env[69992]: DEBUG nova.compute.manager [req-1614354a-7c92-4bb7-9a31-de229fa594ca req-6f640829-2593-447e-998c-d210a5d60508 service nova] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Received event network-vif-deleted-1b80eab6-fa50-4289-9c37-aceac0cee737 {{(pid=69992) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 2134.326030] env[69992]: INFO nova.compute.manager [req-1614354a-7c92-4bb7-9a31-de229fa594ca req-6f640829-2593-447e-998c-d210a5d60508 service nova] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Neutron deleted interface 1b80eab6-fa50-4289-9c37-aceac0cee737; detaching it from the instance and deleting it from the info cache [ 2134.326223] env[69992]: DEBUG nova.network.neutron [req-1614354a-7c92-4bb7-9a31-de229fa594ca req-6f640829-2593-447e-998c-d210a5d60508 service nova] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2134.775060] env[69992]: DEBUG nova.network.neutron [-] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Updating instance_info_cache with network_info: [] {{(pid=69992) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2134.828782] env[69992]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f88228a8-f2f9-4341-a26d-79117cc49c8b {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.838601] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b4251dd-8d1c-4685-82d6-2b125cc29c53 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.864200] env[69992]: DEBUG nova.compute.manager [req-1614354a-7c92-4bb7-9a31-de229fa594ca req-6f640829-2593-447e-998c-d210a5d60508 service nova] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Detach interface failed, port_id=1b80eab6-fa50-4289-9c37-aceac0cee737, reason: Instance e9edec84-ade9-4eeb-88f3-5180d64af400 could not be found. {{(pid=69992) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 2135.279398] env[69992]: INFO nova.compute.manager [-] [instance: e9edec84-ade9-4eeb-88f3-5180d64af400] Took 1.41 seconds to deallocate network for instance. 
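Deallocating the network means removing the Neutron ports that were bound to the instance, which is why a network-vif-deleted event arrives and the instance_info_cache ends up empty. Nova drives this through its internal Neutron client; an equivalent manual cleanup with openstacksdk would look roughly like this (the cloud name is a placeholder for a clouds.yaml entry):

    import openstack

    conn = openstack.connect(cloud='devstack')

    # Ports Nova created for a server carry the instance UUID as device_id.
    for port in conn.network.ports(device_id='e9edec84-ade9-4eeb-88f3-5180d64af400'):
        conn.network.delete_port(port, ignore_missing=True)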
[ 2135.786377] env[69992]: DEBUG oslo_concurrency.lockutils [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2135.786673] env[69992]: DEBUG oslo_concurrency.lockutils [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2135.786896] env[69992]: DEBUG nova.objects.instance [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lazy-loading 'resources' on Instance uuid e9edec84-ade9-4eeb-88f3-5180d64af400 {{(pid=69992) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2136.321406] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4518cb7-a1d9-4951-b3df-60548e88295e {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.328642] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70cad386-b932-49b7-bdfe-22b792c373a2 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.357855] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a7b0931-4845-4c64-bdb0-3b1695598a55 {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.364371] env[69992]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2372120f-37e7-4e88-afa3-92b27861981a {{(pid=69992) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.376771] env[69992]: DEBUG nova.compute.provider_tree [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Inventory has not changed in ProviderTree for provider: 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 {{(pid=69992) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2136.879898] env[69992]: DEBUG nova.scheduler.client.report [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Inventory has not changed for provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69992) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2137.385100] env[69992]: DEBUG oslo_concurrency.lockutils [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 
tempest-AttachVolumeTestJSON-502636254-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.598s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2137.404970] env[69992]: INFO nova.scheduler.client.report [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Deleted allocations for instance e9edec84-ade9-4eeb-88f3-5180d64af400 [ 2137.914264] env[69992]: DEBUG oslo_concurrency.lockutils [None req-34851775-25b3-4a2d-9450-8f1332f9a4d5 tempest-AttachVolumeTestJSON-502636254 tempest-AttachVolumeTestJSON-502636254-project-member] Lock "e9edec84-ade9-4eeb-88f3-5180d64af400" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.663s {{(pid=69992) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2142.604977] env[69992]: DEBUG oslo_service.periodic_task [None req-df32b9c3-b9a2-4469-988a-0135d23dd962 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69992) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
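The inventory dump and the "Deleted allocations" line close the loop with Placement: the terminated instance's allocations against provider 9dc5dd7f-a3af-48a9-a04e-f6c1d333da28 are removed, and the provider's usable capacity continues to be derived from the logged inventory as (total - reserved) * allocation_ratio per resource class. Worked out for the numbers above:

    # Capacity Placement derives from the inventory logged for the provider
    # (values copied from the log entries above).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0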